Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-tcg19-3w5pd --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "359c9202cfe3596641049c5dd35ad32d7afe8820" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-tcg19" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: 
"/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git 
+refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 5 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=6c2dd3e8-8c4a-426d-8dc3-06416a1ba8af BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 JENKINS_URL=https://do.pingcap.net/jenkins/ 
BUILD_ID=1786756543966875649 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-tcg19 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305 NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-tcg19 pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build2345394530=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1853-tcg19-rx4k4 bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 13152 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: 433e76be10866ec2 (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 
20 IN A 20.205.243.166 ;; Query time: 0 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Sat May 04 13:56:02 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/model/kv.go, cdc/model/sink.go, cdc/model/sink_test.go, cdc/processor/processor.go, cdc/processor/sinkmanager/manager.go, cdc/processor/sourcemanager/manager.go, cdc/redo/reader/reader.go, cdc/sink/dmlsink/factory/factory.go, cdc/sink/dmlsink/txn/mysql/mysql.go, cdc/sink/dmlsink/txn/mysql/mysql_test.go, cmd/kafka-consumer/main.go, cmd/pulsar-consumer/main.go, cmd/storage-consumer/main.go, errors.toml, pkg/applier/redo.go, pkg/applier/redo_test.go, pkg/errors/cdc_errors.go, pkg/errors/helper.go, pkg/sink/codec/open/open_protocol_decoder.go, tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml, tests/integration_tests/changefeed_dup_error_restart/conf/workload, tests/integration_tests/changefeed_dup_error_restart/run.sh, tests/integration_tests/force_replicate_table/run.sh, tests/integration_tests/run_group.sh] [Pipeline] echo diff file not matched: cdc/model/kv.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-be15534) 203635712 bytes in 1.22 secs (166305531 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/ .git HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) POST git-upload-pack (656 bytes) POST git-upload-pack (973 bytes) From 
https://github.com/pingcap/tiflow
= [up to date] master -> origin/master
* [new ref] refs/pull/10919/head -> origin/pr/10919/head
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
🚧 Checkouting to base SHA:be1553484fe4c03594eabb8d7435c694e5fd7224...
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
✅ Checked. 🎉
🧾 HEAD info: be1553484fe4c03594eabb8d7435c694e5fd7224
be1553484 codec(ticdc): avro simplify the unit test (#11010)
2a7a65c6f Support Sequences (#10203)
36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005)
🚧 Pre-merge heads of pull requests to base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ...
Updating be1553484..6a342866d
Fast-forward
 cdc/model/kv.go | 5 +
 cdc/model/sink.go | 35 ++-
 cdc/model/sink_test.go | 9 +-
 cdc/processor/processor.go | 21 +-
 cdc/processor/sinkmanager/manager.go | 5 +
 cdc/processor/sourcemanager/manager.go | 66 +++-
 cdc/redo/reader/reader.go | 21 +-
 cdc/sink/dmlsink/factory/factory.go | 8 +-
 cdc/sink/dmlsink/txn/mysql/mysql.go | 89 +++---
 cdc/sink/dmlsink/txn/mysql/mysql_test.go | 2 +-
 cmd/kafka-consumer/main.go | 4 +-
 cmd/pulsar-consumer/main.go | 17 +-
 cmd/storage-consumer/main.go | 4 +-
 errors.toml | 5 +
 pkg/applier/redo.go | 303 +++++++++++++++++-
 pkg/applier/redo_test.go | 347 ++++++++++++++++++++-
 pkg/errors/cdc_errors.go | 4 +
 pkg/errors/helper.go | 19 ++
 pkg/sink/codec/open/open_protocol_decoder.go | 3 +
 .../conf/diff_config.toml | 29 ++
 .../changefeed_dup_error_restart/conf/workload | 13 +
 .../changefeed_dup_error_restart/run.sh | 54 ++++
 .../integration_tests/force_replicate_table/run.sh | 4 +-
 tests/integration_tests/run_group.sh | 2 +-
 24 files changed, 971 insertions(+), 98 deletions(-)
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/workload
 create mode 100755 tests/integration_tests/changefeed_dup_error_restart/run.sh
🧾 Pre-merged result: 6a342866deda3271b067f649c64b771bbe3d2a00
6a342866d fix bit test
0dd104704 fix
5b7ca6f90 f
✅ Pre merged 🎉
✅ ~~~~~All done.~~~~~~
[Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache saved successfully (git/pingcap/tiflow/rev-be15534-6a34286) 203830272 bytes in 3.36 secs (60659923 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh + cd ../tiflow + ./scripts/download-integration-test-binaries.sh master Download binaries...
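The download step whose output follows roughly works like this: for each component it resolves a commit hash from the file server, then fetches and unpacks that build's tarball. A minimal sketch of the pattern; the tarball URL shape is taken from the log itself, but the hash-lookup path, variable names, and extraction layout are assumptions, not the actual contents of download-integration-test-binaries.sh.

    # Illustrative sketch only -- not the real download-integration-test-binaries.sh.
    # The builds/.../centos7/tidb-server.tar.gz URL shape matches the log above;
    # the sha1 lookup path and directory layout below are assumptions.
    FILE_SERVER_URL="http://fileserver.pingcap.net"
    mkdir -p tmp third_bin
    tidb_sha1=$(curl -sfL "${FILE_SERVER_URL}/download/refs/pingcap/tidb/master/sha1")
    wget -nv "${FILE_SERVER_URL}/download/builds/pingcap/tidb/${tidb_sha1}/centos7/tidb-server.tar.gz" -O tmp/tidb-server.tar.gz
    tar -xzf tmp/tidb-server.tar.gz -C third_bin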
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 971 0 --:--:-- --:--:-- --:--:-- 976 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1487 0 --:--:-- --:--:-- --:--:-- 1518 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1001 0 --:--:-- --:--:-- --:--:-- 1000 100 41 100 41 0 0 999 0 --:--:-- --:--:-- --:--:-- 1000 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 2231 0 --:--:-- --:--:-- --:--:-- 2277 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz 2024-05-04 21:56:23 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz [536570515/536570515] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz 2024-05-04 21:56:34 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz 2024-05-04 21:56:49 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz 2024-05-04 21:57:04 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz [456057803/456057803] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-05-04 21:57:09 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-05-04 21:57:10 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-05-04 21:57:10 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-05-04 21:57:11 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-05-04 21:57:14 
URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz [79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-05-04 21:57:20 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 21:57 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 21:57 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 21:57 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 21:57 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 
1 jenkins jenkins 2.0M Apr 30 16:11 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start + cd - /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 21:57 . drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 21:57 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 21:57 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 21:57 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 
1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 8.0K drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 21:57 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 21:57 .. 
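The `make check_third_party_binary` step and the `-V`/`--version` calls above sanity-check the downloaded components before anything is built. A minimal sketch of that kind of check, assuming the binary names from the listing above; the loop is illustrative and not the real Makefile target.

    # Illustrative sketch: fail fast if a required third-party binary is missing.
    for b in tidb-server tikv-server pd-server tiflash pd-ctl sync_diff_inspector go-ycsb etcdctl jq minio; do
      [ -x "./bin/${b}" ] || { echo "missing or not executable: ./bin/${b}" >&2; exit 1; }
    done
    ./bin/tidb-server -V   # print the exact build in use, as the log does above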
+ '[' -f ./bin/cdc ']' + make cdc CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:57:24" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc ./cmd/cdc go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/xdg/scram v1.0.5 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading golang.org/x/sync v0.7.0 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/google/btree v1.1.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/DATA-DOG/go-sqlmock v1.5.0 go: downloading github.com/imdario/mergo v0.3.16 go: downloading github.com/google/uuid v1.6.0 go: downloading golang.org/x/time v0.5.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 go: downloading github.com/swaggo/gin-swagger v1.2.0 go: downloading github.com/r3labs/diff v1.1.0 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading github.com/stretchr/testify v1.9.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading go.uber.org/atomic v1.11.0 go: downloading 
github.com/YangKeao/seahash v0.0.0-20240229041150-e7bf269c3140 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/goccy/go-json v0.10.2 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/segmentio/kafka-go v0.4.41-0.20230526171612-f057b1d369cd go: downloading github.com/hashicorp/golang-lru v0.5.1 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/uber-go/atomic v1.4.0 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 go: downloading github.com/pingcap/tidb-dashboard v0.0.0-20240326110213-9768844ff5d7 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/swaggo/swag v1.16.3 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/golang/mock v1.6.0 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading 
github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/xdg-go/scram v1.1.2 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/glebarez/sqlite v1.7.0 go: downloading gorm.io/driver/mysql v1.3.3 go: downloading github.com/jinzhu/now v1.1.5 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/joomcode/errorx v1.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/KyleBanks/depth v1.2.1 go: downloading github.com/go-openapi/spec v0.21.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading github.com/cockroachdb/tokenbucket 
v0.0.0-20230807174530-cc333fc44b06 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/aws/aws-sdk-go-v2/config v1.18.30 go: downloading github.com/aws/aws-sdk-go-v2/credentials v1.13.29 go: downloading github.com/aws/aws-sdk-go-v2/service/glue v1.58.1 go: downloading github.com/jarcoal/httpmock v1.2.0 go: downloading github.com/mailru/easyjson v0.7.7 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/xdg-go/pbkdf2 v1.0.0 go: downloading github.com/xdg-go/stringprep v1.0.4 go: downloading github.com/shopspring/decimal v1.3.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/glebarez/go-sqlite v1.21.2 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/go-ozzo/ozzo-validation/v4 v4.3.0 go: downloading github.com/blacktear23/go-proxyprotocol v1.0.6 go: downloading github.com/pingcap/fn v1.0.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 go: downloading github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 go: 
downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.6 go: downloading github.com/aws/aws-sdk-go-v2/internal/ini v1.3.37 go: downloading github.com/aws/aws-sdk-go-v2/service/sso v1.12.14 go: downloading github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.14 go: downloading github.com/aws/aws-sdk-go-v2/service/sts v1.20.1 go: downloading modernc.org/libc v1.37.1 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading modernc.org/sqlite v1.27.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/go-openapi/jsonpointer v0.21.0 go: downloading github.com/go-openapi/jsonreference v0.21.0 go: downloading github.com/go-openapi/swag v0.23.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.36 go: downloading github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.30 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading k8s.io/api v0.28.6 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.30 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/josharian/intern v1.0.0 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading 
github.com/lestrrat-go/option v1.0.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading modernc.org/memory v1.7.2 go: downloading modernc.org/mathutil v1.6.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + '[' -f ./bin/cdc_kafka_consumer ']' + make kafka_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:22" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go + '[' -f ./bin/cdc_storage_consumer ']' + make storage_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:29" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go + '[' -f ./bin/cdc.test ']' + make integration_test_build cd tools/check && GO111MODULE=on go build -mod=mod -o ../bin/failpoint-ctl github.com/pingcap/failpoint/failpoint-ctl go: downloading github.com/pingcap/failpoint v0.0.0-20210316064728-7acb0f0a3dfd go: downloading github.com/sergi/go-diff v1.1.0 CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X 
"github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc_pulsar_consumer ./cmd/pulsar-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/oauth2-server ./cmd/oauth2-server/main.go go: downloading github.com/go-oauth2/oauth2/v4 v4.5.2 go: downloading github.com/tidwall/buntdb v1.3.0 go: downloading github.com/tidwall/rtred v0.1.2 go: downloading github.com/tidwall/gjson v1.14.3 go: downloading github.com/tidwall/grect v0.1.4 go: downloading github.com/tidwall/match v1.1.1 go: downloading github.com/tidwall/tinyqueue v0.1.1 go: downloading github.com/tidwall/pretty v1.2.0 $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl enable >/dev/null) go: downloading github.com/PingCAP-QE/go-sqlsmith v0.0.0-20231213065948-336e064b488d go: downloading github.com/chzyer/readline v1.5.1 go: downloading github.com/deepmap/oapi-codegen v1.9.0 go: downloading github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 go: downloading github.com/getkin/kin-openapi v0.80.0 go: downloading github.com/gogo/gateway v1.1.0 go: downloading github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954 go: downloading go.uber.org/dig v1.13.0 go: downloading github.com/mattn/go-shellwords v1.0.12 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0 go: downloading go.uber.org/ratelimit v0.2.0 go: downloading github.com/VividCortex/mysqlerr v1.0.0 go: downloading go.uber.org/goleak v1.3.0 go: downloading github.com/ngaut/log v0.0.0-20210830112240-0124ec040aeb go: downloading github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 go: downloading github.com/bradleyjkemp/grpc-tools v0.2.5 go: downloading github.com/integralist/go-findroot v0.0.0-20160518114804-ac90681525dc go: downloading github.com/jmoiron/sqlx v1.3.3 go: downloading upper.io/db.v3 v3.7.1+incompatible go: downloading github.com/ghodss/yaml v1.0.0 go: downloading github.com/improbable-eng/grpc-web v0.12.0 go: downloading github.com/rs/cors v1.7.0 go: downloading github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -c -cover -covermode=atomic \ -coverpkg=github.com/pingcap/tiflow/... \ -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-52-g6a342866d" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 13:59:36" -X "github.com/pingcap/tiflow/pkg/version.GitHash=6a342866deda3271b067f649c64b771bbe3d2a00" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-52-g6a342866d"' -o bin/cdc ./cmd/cdc/main.go \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null) + ls -alh ./bin total 1.2G drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 22:04 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 21:57 .. -rwxr-xr-x. 1 jenkins jenkins 220M May 4 22:04 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 22:03 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 21:59 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 22:00 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 21:59 cdc_storage_consumer -rwxr-xr-x. 1 jenkins jenkins 12M May 4 22:00 oauth2-server + ./bin/cdc version Release Version: v8.2.0-alpha-52-g6a342866d Git Commit Hash: 6a342866deda3271b067f649c64b771bbe3d2a00 Git Branch: HEAD UTC Build Time: 2024-05-04 13:59:36 Go Version: go version go1.21.0 linux/amd64 Failpoint Build: true [Pipeline] } Cache saved successfully (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-6a34286) 1191701504 bytes in 28.64 secs (41609653 bytes/sec) [Pipeline] // cache [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/ + ls -alh ./bin total 3.0G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 22:04 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 21:57 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 22:04 bin -rwxr-xr-x. 1 jenkins jenkins 220M May 4 22:04 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 22:03 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 21:59 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 22:00 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 21:59 cdc_storage_consumer drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 22:04 etc -rwxr-xr-x. 1 jenkins jenkins 17M May 4 22:04 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 22:04 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 22:04 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 22:04 lib lrwxrwxrwx. 1 jenkins jenkins 13 May 4 22:04 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K May 4 22:04 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 May 4 22:04 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K May 4 22:04 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 May 4 22:04 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 
1 jenkins jenkins 15 May 4 22:04 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M May 4 22:04 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M May 4 22:04 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M May 4 22:04 minio -rwxr-xr-x. 1 jenkins jenkins 12M May 4 22:00 oauth2-server -rwxr-xr-x. 1 jenkins jenkins 37M May 4 22:04 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M May 4 22:04 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M May 4 22:04 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M May 4 22:04 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M May 4 22:04 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M May 4 22:04 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M May 4 22:04 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M May 4 22:04 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 22:04 share -rwxr-xr-x. 1 jenkins jenkins 32M May 4 22:04 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M May 4 22:04 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 4 22:04 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M May 4 22:04 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M May 4 22:04 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M May 4 22:04 xprog [Pipeline] } Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 67.18 secs (55476449 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Tests) [Pipeline] parallel [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G00') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G01') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G02') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G03') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G04') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G05') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G06') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G07') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G08') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G09') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G10') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G11') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G12') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G13') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G14') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G15') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G16') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G17') [Pipeline] withEnv [Pipeline] { 
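Note: the Tests stage above fans out into 18 parallel matrix branches, one per TEST_GROUP value (G00 through G17), and each branch later provisions its own Kafka-enabled pod from the same template. As a rough sketch of what one matrix cell ends up executing (the runner script name and its arguments are assumptions for illustration, not taken from this log):

    # Hypothetical per-group invocation: each matrix cell exports its group id
    # and runs that group's integration tests against the Kafka sink.
    export TEST_GROUP=G05            # the matrix axis value for this branch
    cd tiflow
    # run_group.sh (name assumed) maps a group label to a fixed set of test cases.
    ./tests/integration_tests/run_group.sh kafka "${TEST_GROUP}"

Splitting the suite into fixed groups keeps each pod's wall-clock time inside the 45-minute stage timeout seen later in this log.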
[Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes 
jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-qf5bw-qlt28
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] podTemplate [Pipeline] { [Pipeline] { [Pipeline] node [Pipeline] checkout
The recommended git tool is: git
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate [Pipeline] { [Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6e7ff758; decorates RemoteLauncher[hudson.remoting.Channel@7ef6735a:JNLP4-connect connection from 10.233.67.50/10.233.67.50:46594] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate [Pipeline] {
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk
[Pipeline] node
Cloning repository https://github.com/PingCAP-QE/ci.git
> git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
> git --version # timeout=10
> git --version # 'git version 2.39.2'
> git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate [Pipeline] { [Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate [Pipeline] { [Pipeline] node
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-c2q9n-czgvk
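Note: every agent pins the CI repository to the same revision (03312178c534dce949face80c69812d989e55009) with a depth-1 fetch instead of a full clone. A minimal sketch of the equivalent commands, reconstructed from the git invocations in this log rather than from the pipeline definition:

    # Shallow-fetch the CI repository and check out the exact revision the build used.
    git init ci && cd ci
    git fetch --tags --force --progress --depth=1 -- \
        https://github.com/PingCAP-QE/ci.git '+refs/heads/*:refs/remotes/origin/*'
    git checkout -f 03312178c534dce949face80c69812d989e55009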
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
[Pipeline] withEnv [Pipeline] {
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] checkout [Pipeline] stage [Pipeline] { (Test)
The recommended git tool is: git
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
> git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
> git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
> git rev-parse origin/main^{commit} # timeout=10
> git config core.sparsecheckout # timeout=10
> git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] {
Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6’ is offline
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-lr873-brs0v
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@10ff4756; decorates RemoteLauncher[hudson.remoting.Channel@61a6e8ca:JNLP4-connect connection from 10.233.71.56/10.233.71.56:58290] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-n5hp4-b2g35
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fqx8d-gvkqk
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
> git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
> git --version # timeout=10
> git --version # 'git version 2.39.2'
> git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6xzf3-b277m
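Note: each pod also carries the Debezium example MySQL container as the downstream database, listening on port 3310 with an empty root password (MYSQL_ALLOW_EMPTY_PASSWORD=yes). A minimal readiness check before consumers start writing could look like this (a sketch; the pipeline's own wait logic is not shown in this log):

    # Poll the downstream MySQL until it accepts connections on port 3310.
    for i in $(seq 1 30); do
        mysqladmin ping -h 127.0.0.1 -P 3310 -u root --silent && break
        sleep 2
    done
    mysql -h 127.0.0.1 -P 3310 -u root -e 'SELECT VERSION();'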
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-1nlmb-11b5c
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv’ is offline
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-r9cq4-qn9h9
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-5mh68-6cbv0 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "43253a739d43e3f5390130a561e02b04bd2515ae" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-5mh68" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In"
values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: 
"workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fchds-zz2pw --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "6dfa088a1379f00d83a82194cec1047a4baa5aa1" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fchds" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: 
"KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: 
"JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6j6mj-d8qmn --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "af9e0badd57f6a318af66c2e626c012939702d6e" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6j6mj" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" 
name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-v3qw7-7db8r --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "2959615e0e1d05743517b7cb73b49d168114f0bb" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-v3qw7" name: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: 
"MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Still waiting to schedule task Waiting for next available executor on โ€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7lโ€™ Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-mpv2f-gfhbc --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "2663558777b2aa71a4c298648460a64f5e8c6850" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-mpv2f" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: 
"test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l" - name: "JENKINS_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-3jhbg-qrqgv --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "9a90ddd3c110ce11cc7561445aea977b391b3fea" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-3jhbg" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" 
requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 Still waiting to schedule task โ€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8โ€™ is offline Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9b5lh-r3q2z --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "bf4e771dfa57dc1afaccf386c880efd74fcaecaa" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9b5lh" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: 
false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 20.75 secs (179648165 bytes/sec) [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] sh [Pipeline] checkout The recommended git tool is: git [Pipeline] container [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k The recommended git tool is: git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g [Pipeline] checkout Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] stage The recommended git tool is: git [Pipeline] { (Test) [Pipeline] } [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@43b72a2b; decorates RemoteLauncher[hudson.remoting.Channel@e57b239:JNLP4-connect connection from 10.233.123.215/10.233.123.215:55848] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] // timeout [Pipeline] } [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // container [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6f49afac; decorates RemoteLauncher[hudson.remoting.Channel@56134a65:JNLP4-connect connection from 10.233.108.148/10.233.108.148:55164] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4d6fca91; decorates RemoteLauncher[hudson.remoting.Channel@50187f1:JNLP4-connect connection from 10.233.107.142/10.233.107.142:42412] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@73e04aaa; decorates RemoteLauncher[hudson.remoting.Channel@2c5fc064:JNLP4-connect connection from 10.233.90.119/10.233.90.119:55388] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher 
org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@76874163; decorates RemoteLauncher[hudson.remoting.Channel@1b98f76b:JNLP4-connect connection from 10.233.88.95/10.233.88.95:52572] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1fae1ce8; decorates RemoteLauncher[hudson.remoting.Channel@22973886:JNLP4-connect connection from 10.233.106.74/10.233.106.74:53648] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@2c97d0ac; decorates RemoteLauncher[hudson.remoting.Channel@453d7acc:JNLP4-connect connection from 10.233.84.57/10.233.84.57:46588] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1ed54e31; decorates RemoteLauncher[hudson.remoting.Channel@6f0388d5:JNLP4-connect connection from 10.233.97.2/10.233.97.2:48358] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@15099ba1; decorates RemoteLauncher[hudson.remoting.Channel@15e29faf:JNLP4-connect connection from 10.233.86.239/10.233.86.239:35582] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # 
timeout=5 [Pipeline] cache No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@108a8d1; decorates RemoteLauncher[hudson.remoting.Channel@5bd39670:JNLP4-connect connection from 10.233.105.86/10.233.105.86:39894] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4b14b4de; decorates RemoteLauncher[hudson.remoting.Channel@53bf9f37:JNLP4-connect connection from 10.233.70.234/10.233.70.234:58498] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@39aa4bdd; decorates RemoteLauncher[hudson.remoting.Channel@2d85ac48:JNLP4-connect connection from 10.233.100.29/10.233.100.29:58122] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G04 Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=8742f253-24aa-4577-836e-7e2fa278e403 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-qf5bw GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 pingcap_tiflow_pull_cdc_integration_kafka_test_1853-qf5bw 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@65ddd5d8; decorates RemoteLauncher[hudson.remoting.Channel@2859020c:JNLP4-connect connection from 10.233.66.33/10.233.66.33:52026] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- 
https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9mrfb-0pvqb --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "ee633476b72e987a5a2445470c8e4c08cebbd0bd" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9mrfb" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: 
"4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" 
value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Avoid second fetch > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
> git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Verifying downstream PD is started... 
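Note on the "Verifying upstream/downstream PD is started..." lines above: the harness keeps probing PD until it answers before moving on to TiKV. A minimal sketch of such a wait loop, assuming PD is on 127.0.0.1:2379 and that polling its HTTP health endpoint is an acceptable readiness signal (the actual helper used by the test framework is not shown in this log):

  # Poll the PD HTTP API until it responds, or give up after ~60s.
  # The endpoint path and port are assumptions; adjust to the cluster under test.
  pd_url="http://127.0.0.1:2379/pd/api/v1/health"
  for i in $(seq 1 30); do
      if curl -sf "$pd_url" >/dev/null; then
          echo "PD is up after $i attempt(s)"
          break
      fi
      echo "PD not ready yet (attempt $i), retrying..."
      sleep 2
  done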
> git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-jvsw5-ggt2j --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "48563469bbe41fde3b8d5432f0561893563dea72" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-jvsw5" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" 
tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1853-kzqfw-5txzd --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "8287e198034910f00e458d4c4628bff2a174c955" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1853-kzqfw" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: 
"4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) 
VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb777fc000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:1346, start at 2024-05-04 22:06:53.451194997 +0800 CST m=+5.093958754 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:08:53.457 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:06:53.439 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:56:53.439 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb777fc000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:1346, start at 2024-05-04 22:06:53.451194997 +0800 CST m=+5.093958754 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:08:53.457 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:06:53.439 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:56:53.439 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb777ec0017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:1435, start at 2024-05-04 22:06:53.474935418 +0800 CST m=+5.062358697 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:08:53.484 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:06:53.485 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:56:53.485 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2812.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449532819403964417 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532819403964417 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 22:06:58 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28492851.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:07:01 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e {"id":"41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831618} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43edb621d0 41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e /tidb/cdc/default/default/upstream/7365145660673735987 {"id":7365145660673735987,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e {"id":"41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831618} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43edb621d0 41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e /tidb/cdc/default/default/upstream/7365145660673735987 {"id":7365145660673735987,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e 
{"id":"41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831618} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43edb621d0 41aab3c0-1611-4ca3-97fa-ab9d6bb57b2e /tidb/cdc/default/default/upstream/7365145660673735987 {"id":7365145660673735987,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2909.out cli changefeed create --start-ts=449532819403964417 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-22426?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 0be83614-c712-4463-8e08-e9a2210c481e Info: {"upstream_id":7365145660673735987,"namespace":"default","id":"0be83614-c712-4463-8e08-e9a2210c481e","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-22426?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:07:02.011231634+08:00","start_ts":449532819403964417,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532819403964417,"checkpoint_ts":449532819403964417,"checkpoint_time":"2024-05-04 22:06:56.989"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
+ set +x [Sat May 4 22:07:03 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>> table foreign_key.finish_mark not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 2-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 26.75 secs (139343539 bytes/sec) [Pipeline] { table foreign_key.finish_mark not exists for 3-th check, retry later [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] checkout [Pipeline] { The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] } [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) table foreign_key.finish_mark not exists for 4-th check, retry later [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN 
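The Kafka readiness probe traced above (the `+ echo` / `+ nc` lines) can be reassembled into the following sequence; ports and the expected broker id match the pod spec earlier in this log, and in practice each step would typically be retried until it succeeds:

  # Wait for ZooKeeper and Kafka ports, then confirm broker id 1 has
  # registered under /brokers/ids via the ZooKeeper "dump" command.
  echo "Waiting for zookeeper to be ready..."
  nc -z localhost 2181
  echo "Waiting for kafka to be ready..."
  nc -z localhost 9092
  echo "Waiting for kafka-broker to be ready..."
  echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1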
[Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@736294ea; decorates RemoteLauncher[hudson.remoting.Channel@2606fa6a:JNLP4-connect connection from 10.233.97.76/10.233.97.76:43190] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] } No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@35811e11; decorates RemoteLauncher[hudson.remoting.Channel@40ae5c4:JNLP4-connect connection from 10.233.105.178/10.233.105.178:49112] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@297a8929; decorates RemoteLauncher[hudson.remoting.Channel@3641e522:JNLP4-connect connection from 10.233.71.105/10.233.71.105:59408] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] // container Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version 
# timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] sh [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G05 Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=df9eac35-5ae4-4981-8355-c0a825327b0b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-c2q9n GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc pingcap_tiflow_pull_cdc_integration_kafka_test_1853-c2q9n GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
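Each parallel leg of this job invokes run_group.sh with the sink type and a group id (G04 ran foreign_key, ddl_puller_lag, ddl_only_block_related_table and changefeed_auto_stop; G05 runs charset_gbk, ddl_manager and multi_source). A minimal sketch of how such a dispatcher might be structured, using only the two group memberships reported in this log; the real tests/integration_tests/run_group.sh may organize groups and invoke each case differently:

  #!/usr/bin/env bash
  # Usage: run_group.sh <sink-type> <group>   e.g. run_group.sh kafka G04
  sink_type=$1
  group=$2
  declare -A groups=(
      [G04]="foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop"
      [G05]="charset_gbk ddl_manager multi_source"
  )
  echo "Run cases: ${groups[$group]}"
  for case in ${groups[$group]}; do
      # Assumption: each case directory ships a run.sh that takes the sink type.
      bash "tests/integration_tests/$case/run.sh" "$sink_type"
  done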
[Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache table foreign_key.finish_mark not exists for 5-th check, retry later > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" table foreign_key.finish_mark not exists for 6-th check, retry later > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table foreign_key.finish_mark not exists for 7-th check, retry later Verifying downstream PD is started... table foreign_key.finish_mark not exists for 8-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
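The interleaved "table foreign_key.finish_mark not exists for N-th check, retry later" lines above come from a downstream polling step: the test writes a marker table upstream and waits until it shows up through replication. A condensed sketch of that polling loop, assuming hypothetical helper name, retry budget and connection details (this is not the utility's verbatim source):

    # Poll the downstream MySQL endpoint until the marker table can be described.
    check_table_exists() {
      local table=$1 host=$2 port=$3 max_retry=${4:-60}
      for ((i = 1; i <= max_retry; i++)); do
        if mysql -h "$host" -P "$port" -u root -e "DESC ${table};" >/dev/null 2>&1; then
          echo "table ${table} exists"
          return 0
        fi
        echo "table ${table} not exists for ${i}-th check, retry later"
        sleep 2
      done
      echo "table ${table} does not exist after ${max_retry} checks"
      return 1
    }

    check_table_exists foreign_key.finish_mark 127.0.0.1 3306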
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table foreign_key.finish_mark not exists for 9-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table foreign_key.finish_mark not exists for 10-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table foreign_key.finish_mark not exists for 11-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 11.64 secs (320117502 bytes/sec) [Pipeline] { [Pipeline] cache table foreign_key.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:07:29 CST 2024] <<<<<< run test case foreign_key success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb990a0000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:1303, start at 2024-05-04 22:07:27.801338266 +0800 CST m=+5.528315021 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:27.808 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:27.784 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:27.784 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb990a0000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:1303, start at 2024-05-04 22:07:27.801338266 +0800 CST m=+5.528315021 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:27.808 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:27.784 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:27.784 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fb9915c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:1385, start at 2024-05-04 22:07:27.87725683 +0800 CST m=+5.552096221 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:27.886 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:27.881 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:27.881 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
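The "Verifying Upstream/Downstream TiDB is started..." lines, the transient ERROR 2003 messages, and the mysql.tidb dumps above reflect a readiness probe: the harness keeps querying TiDB until the bootstrap and tikv_gc_* rows in mysql.tidb can be read. A rough equivalent is sketched below; the ports, retry budget and function name are assumptions for illustration.

    # Retry a query against mysql.tidb until TiDB answers; failure while the server
    # is still starting shows up as ERROR 2003, as in the log above.
    wait_tidb_ready() {
      local host=$1 port=$2
      for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -e \
             "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;" 2>/dev/null; then
          return 0
        fi
        echo "TiDB on ${host}:${port} not ready, retry ${i} ..."
        sleep 1
      done
      return 1
    }

    wait_tidb_ready 127.0.0.1 4000   # upstream (port assumed)
    wait_tidb_ready 127.0.0.1 3306   # downstream (port assumed)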
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 22:07:32 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.28642866.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:07:35 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c0e3291-eb13-4d0b-a623-1a87722bae79 {"id":"3c0e3291-eb13-4d0b-a623-1a87722bae79","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ee3893cc 3c0e3291-eb13-4d0b-a623-1a87722bae79 /tidb/cdc/default/default/upstream/7365145802859892612 {"id":7365145802859892612,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c0e3291-eb13-4d0b-a623-1a87722bae79 {"id":"3c0e3291-eb13-4d0b-a623-1a87722bae79","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ee3893cc 3c0e3291-eb13-4d0b-a623-1a87722bae79 /tidb/cdc/default/default/upstream/7365145802859892612 {"id":7365145802859892612,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c0e3291-eb13-4d0b-a623-1a87722bae79 {"id":"3c0e3291-eb13-4d0b-a623-1a87722bae79","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ee3893cc 3c0e3291-eb13-4d0b-a623-1a87722bae79 /tidb/cdc/default/default/upstream/7365145802859892612 {"id":7365145802859892612,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 8acae435-174e-4917-bd9e-3574b6535ec8 Info: {"upstream_id":7365145802859892612,"namespace":"default","id":"8acae435-174e-4917-bd9e-3574b6535ec8","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-04T22:07:36.092850714+08:00","start_ts":449532828787146753,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532828787146753,"checkpoint_ts":449532828787146753,"checkpoint_time":"2024-05-04 22:07:32.783"} [Sat May 4 22:07:36 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 16.56 secs (225085938 bytes/sec) [Pipeline] { [Pipeline] cache Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
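The shell trace in the charset_gbk case above (the curl loop against http://127.0.0.1:8300/debug/info, the greps for 'failed to get info:' and 'etcd info', and the sleep 3 between attempts) is the harness waiting for the freshly started cdc server to become ready. Restated as a standalone helper, with an illustrative function name:

    # Wait until the CDC server answers /debug/info and the body mentions "etcd info",
    # mirroring the traced loop above. Up to 51 attempts, 3 seconds apart.
    wait_cdc_ready() {
      local addr=${1:-127.0.0.1:8300}
      local res
      for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>&1 || true)
        echo "$res" | grep -q 'failed to get info:' && return 1
        echo "$res" | grep -q 'etcd info' && return 0
        [ "$i" -eq 50 ] && return 1
        sleep 3
      done
    }

    wait_cdc_ready 127.0.0.1:8300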
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 3-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fbb33640016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:4100, start at 2024-05-04 22:07:54.600712354 +0800 CST m=+5.128100233 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:54.607 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:54.585 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:54.585 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fbb33640016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:4100, start at 2024-05-04 22:07:54.600712354 +0800 CST m=+5.128100233 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:54.607 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:54.585 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:54.585 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fbb333c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:4180, start at 2024-05-04 22:07:54.618900189 +0800 CST m=+5.098772762 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:09:54.625 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:07:54.625 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:57:54.625 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark exists check table exists success check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:07:57 CST 2024] <<<<<< run test case charset_gbk success! >>>>>> + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5619.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449532835419914241 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532835419914241 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:08:00 CST 2024] <<<<<< START cdc server in ddl_puller_lag case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.56755677.out server --log-file /tmp/tidb_cdc_test/ddl_puller_lag/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_puller_lag/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:08:03 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/742dd27d-09e2-4734-bc03-3055ddd0d8f0 {"id":"742dd27d-09e2-4734-bc03-3055ddd0d8f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831680} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43eea308f0 742dd27d-09e2-4734-bc03-3055ddd0d8f0 /tidb/cdc/default/default/upstream/7365145926357510265 {"id":7365145926357510265,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/742dd27d-09e2-4734-bc03-3055ddd0d8f0 {"id":"742dd27d-09e2-4734-bc03-3055ddd0d8f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831680} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43eea308f0 742dd27d-09e2-4734-bc03-3055ddd0d8f0 /tidb/cdc/default/default/upstream/7365145926357510265 {"id":7365145926357510265,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/742dd27d-09e2-4734-bc03-3055ddd0d8f0 {"id":"742dd27d-09e2-4734-bc03-3055ddd0d8f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831680} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43eea308f0 742dd27d-09e2-4734-bc03-3055ddd0d8f0 /tidb/cdc/default/default/upstream/7365145926357510265 {"id":7365145926357510265,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5729.out cli changefeed create --start-ts=449532835419914241 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-2193?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760' Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 16.28 secs (228926920 bytes/sec) [Pipeline] { [Pipeline] cache Create changefeed successfully! 
ID: 50104afc-fada-4d16-b18e-fc300be80a10 Info: {"upstream_id":7365145926357510265,"namespace":"default","id":"50104afc-fada-4d16-b18e-fc300be80a10","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-2193?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=ddl_puller_lag\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:08:04.168040766+08:00","start_ts":449532835419914241,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532835419914241,"checkpoint_ts":449532835419914241,"checkpoint_time":"2024-05-04 22:07:58.085"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 22:08:05 CST 2024] <<<<<< START kafka consumer in ddl_puller_lag case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_manager/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/ddl_manager Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 7.83 secs (475859393 bytes/sec) [Pipeline] { [Pipeline] cache Verifying downstream PD is started... Starting Upstream TiKV... 
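The changefeed just created targets a Kafka sink: in the sink URI the path segment is the topic (ticdc-ddl-puller-lag-test-2193), protocol=open-protocol selects the message encoding, partition-num, kafka-version and max-message-bytes tune the producer, and --start-ts pins replication to the TSO queried a few lines earlier. The same call, written as a standalone command; the explicit --pd flag is an assumption, the other values are copied from the log:

    cdc cli changefeed create \
      --pd=http://127.0.0.1:2379 \
      --start-ts=449532835419914241 \
      --sink-uri='kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-2193?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760'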
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fbcf2e00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:4182, start at 2024-05-04 22:08:23.260498449 +0800 CST m=+5.548156539 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:10:23.267 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:08:23.224 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:58:23.224 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 8.16 secs (457004521 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0fbcf2e00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:4182, start at 2024-05-04 22:08:23.260498449 +0800 CST m=+5.548156539 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:10:23.267 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:08:23.224 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:58:23.224 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fbcf3900014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:4267, start at 2024-05-04 22:08:23.292868569 +0800 CST m=+5.527643312 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:10:23.300 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:08:23.268 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-21:58:23.268 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 22:08:28 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.56115613.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:08:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b85582c3-2890-43aa-9cb0-eef996ed820e {"id":"b85582c3-2890-43aa-9cb0-eef996ed820e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef1158cb b85582c3-2890-43aa-9cb0-eef996ed820e /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b85582c3-2890-43aa-9cb0-eef996ed820e {"id":"b85582c3-2890-43aa-9cb0-eef996ed820e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef1158cb b85582c3-2890-43aa-9cb0-eef996ed820e /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b85582c3-2890-43aa-9cb0-eef996ed820e {"id":"b85582c3-2890-43aa-9cb0-eef996ed820e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef1158cb b85582c3-2890-43aa-9cb0-eef996ed820e /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.cli.5676.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-manager Create changefeed successfully! 
ID: ddl-manager Info: {"upstream_id":7365146045570360412,"namespace":"default","id":"ddl-manager","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:08:31.591444236+08:00","start_ts":449532844172640259,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532844172640259,"checkpoint_ts":449532844172640259,"checkpoint_time":"2024-05-04 22:08:31.474"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 22:08:33 CST 2024] <<<<<< START kafka consumer in ddl_manager case >>>>>> wait process 5616 exit for 1-th time... wait process 5616 exit for 2-th time... wait process 5616 exit for 3-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5616) - No such process wait process 5616 exit for 4-th time... process 5616 already exit [Sat May 4 22:08:38 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.57775779.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:08:45 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:08:31.591444236 +0800 CST StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 {"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844592070662,"min-table-barrier-ts":449532844592070662,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:08:31.591444236 +0800 CST StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 
{"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844592070662,"min-table-barrier-ts":449532844592070662,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 + grep -q 'failed to get info:' {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 
22:08:31.591444236 +0800 CST StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 {"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844592070662,"min-table-barrier-ts":449532844592070662,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 + grep -q 'etcd info' 
{"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Sat May 4 22:08:45 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.58435845.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 16.28 secs (228877177 bytes/sec) [Pipeline] { [Pipeline] cache < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:08:47 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:08:31.591444236 +0800 CST StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 {"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844605440035,"min-table-barrier-ts":449532844605440035,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:08:31.591444236 +0800 CST StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 
{"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844605440035,"min-table-barrier-ts":449532844605440035,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 {"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365146045570360412 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:08:31.591444236 +0800 CST 
StartTs:449532844172640259 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001564120 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532844199116802} {CheckpointTs:449532844592070662 MinTableBarrierTs:449532844592070662 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5126c25d-f95e-4cd9-a811-d94f25e41625 {"id":"5126c25d-f95e-4cd9-a811-d94f25e41625","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831718} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43ef115b4d 5126c25d-f95e-4cd9-a811-d94f25e41625 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365146045570360412,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-20674?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:08:31.591444236+08:00","start-ts":449532844172640259,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532844199116802} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449532844605440035,"min-table-barrier-ts":449532844605440035,"admin-job-type":0} /tidb/cdc/default/default/task/position/5126c25d-f95e-4cd9-a811-d94f25e41625/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146045570360412 
{"id":7365146045570360412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table ddl_manager.finish_mark not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 3-th check, retry later table ddl_manager.finish_mark not exists for 4-th check, retry later table ddl_manager.finish_mark not exists for 5-th check, retry later table ddl_manager.finish_mark not exists for 6-th check, retry later table ddl_manager.finish_mark not exists for 7-th check, retry later table ddl_manager.finish_mark not exists for 8-th check, retry later table ddl_manager.finish_mark not exists for 9-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 16.74 secs (222598383 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 10-th check, retry later table ddl_manager.finish_mark not exists for 11-th check, retry later table ddl_manager.finish_mark not exists for 12-th check, retry later table ddl_manager.finish_mark not exists for 13-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 6.02 secs (619620223 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 14-th check, retry later table ddl_manager.finish_mark not exists for 15-th check, retry later table ddl_manager.finish_mark not exists for 16-th check, retry later table ddl_manager.finish_mark not exists for 17-th check, retry later table ddl_manager.finish_mark not exists for 18-th check, retry later table ddl_manager.finish_mark not exists for 19-th check, retry later table ddl_manager.finish_mark not exists for 20-th check, retry later table ddl_manager.finish_mark not exists for 21-th check, retry later table ddl_manager.finish_mark not exists for 22-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 15.34 secs (243021504 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 23-th check, retry later table ddl_manager.finish_mark not exists for 24-th check, retry later table ddl_manager.finish_mark not exists for 25-th check, retry later table ddl_manager.finish_mark not exists for 26-th check, retry later table ddl_manager.finish_mark not exists for 27-th check, retry later table ddl_manager.finish_mark not exists for 28-th check, retry later table ddl_manager.finish_mark not exists for 29-th check, retry later table ddl_manager.finish_mark not exists for 30-th check, retry later table ddl_manager.finish_mark not exists for 31-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 15.49 secs (240673653 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 32-th check, retry later table ddl_manager.finish_mark not exists for 33-th check, retry later table ddl_manager.finish_mark not exists for 34-th check, retry later table ddl_manager.finish_mark not exists for 35-th check, retry later table ddl_manager.finish_mark not exists for 36-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 
3727130624 bytes in 8.11 secs (459799251 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 37-th check, retry later table ddl_manager.finish_mark not exists for 38-th check, retry later table ddl_manager.finish_mark not exists for 39-th check, retry later table ddl_manager.finish_mark not exists for 40-th check, retry later table ddl_manager.finish_mark not exists for 41-th check, retry later table ddl_manager.finish_mark not exists for 42-th check, retry later table ddl_manager.finish_mark not exists for 43-th check, retry later table ddl_manager.finish_mark not exists for 44-th check, retry later table ddl_manager.finish_mark not exists for 45-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 17.51 secs (212798776 bytes/sec) [Pipeline] { [Pipeline] cache table ddl_manager.finish_mark not exists for 46-th check, retry later table ddl_manager.finish_mark not exists for 47-th check, retry later table ddl_manager.finish_mark not exists for 48-th check, retry later table ddl_manager.finish_mark not exists for 49-th check, retry later table ddl_manager.finish_mark not exists for 50-th check, retry later table ddl_manager.finish_mark not exists for 51-th check, retry later table ddl_manager.finish_mark not exists for 52-th check, retry later table ddl_manager.finish_mark not exists for 53-th check, retry later table ddl_manager.finish_mark not exists for 54-th check, retry later table ddl_manager.finish_mark not exists for 55-th check, retry later table ddl_manager.finish_mark not exists for 56-th check, retry later table ddl_manager.finish_mark not exists for 57-th check, retry later table ddl_manager.finish_mark not exists for 58-th check, retry later table ddl_manager.finish_mark not exists for 59-th check, retry later table ddl_manager.finish_mark not exists for 60-th check, retry later table ddl_manager.finish_mark not exists for 61-th check, retry later table ddl_manager.finish_mark not exists for 62-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 30.56 secs (121941416 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout 
Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] sh table ddl_manager.finish_mark not exists for 63-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh table ddl_manager.finish_mark not exists for 64-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 table ddl_manager.finish_mark not exists for 65-th check, retry later [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { table ddl_manager.finish_mark not exists for 66-th check, retry later [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { table ddl_manager.finish_mark not exists for 67-th check, retry later [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] } [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout table ddl_manager.finish_mark not exists for 68-th check, retry later [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] sh [Pipeline] sh table ddl_manager.finish_mark not exists for 69-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G01 Run cases: http_api http_api_tls api_v2 http_api_tls_with_user_auth cli_tls_with_auth kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check canal_json_storage_basic canal_json_storage_partition_table multi_tables_ddl PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=81c6f1dc-8cb4-410a-8db5-2e3cb9a47a71 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-lr873 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 
pingcap_tiflow_pull_cdc_integration_kafka_test_1853-lr873 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:04 CST 2024] <<<<<< run test case http_api success! >>>>>> [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G02 [Pipeline] sh Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=9d92ad44-f980-4630-a32a-a66ede1845de BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-n5hp4 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk pingcap_tiflow_pull_cdc_integration_kafka_test_1853-n5hp4 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:04 CST 2024] <<<<<< run test case consistent_replicate_ddl success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=9543ae5c-cfc0-4ca2-ac8c-a719a074e4c8 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fqx8d GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fqx8d GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G00 Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=4626a847-5ba7-402e-bbd4-9f165af8287d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting 
changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6xzf3 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6xzf3 pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:05 CST 2024] <<<<<< run test case bdr_mode success! >>>>>> [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G09 Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=ce87438e-8eb2-4721-a23d-fc40b9837554 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test 
TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-1nlmb GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-1nlmb pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
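Each parallel Test branch above repeats the same bootstrap before dispatching its group: probe the Kafka sidecars started by the pod template, clear /tmp/tidb_cdc_test, and hand a group id to run_group.sh. The following is a minimal sketch reconstructed from the traced commands in this log; it folds the readiness probe and the launcher (which the log shows as separate [Pipeline] sh steps) into one script, and the retry loops, strict error handling, and the G01 default are illustrative assumptions rather than part of the original CI scripts.

  #!/usr/bin/env bash
  # Sketch of the per-branch test bootstrap seen in the traces above.
  # Assumes it runs inside the golang container of the pod, at the root of the tiflow checkout.
  set -euo pipefail

  GROUP="${1:-G01}"   # e.g. G00..G10, matching the "Run cases:" lines above (assumed default)

  # Wait for the Kafka sidecars declared in the pod template.
  echo "Waiting for zookeeper to be ready..."
  while ! nc -z localhost 2181; do sleep 1; done
  echo "Waiting for kafka to be ready..."
  while ! nc -z localhost 9092; do sleep 1; done

  # The broker registers itself in zookeeper under /brokers/ids/1; the four-letter
  # "dump" command lists those ephemeral nodes.
  echo "Waiting for kafka-broker to be ready..."
  echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1

  # Fresh scratch directory for the integration tests, then dispatch the group.
  rm -rf /tmp/tidb_cdc_test
  mkdir -p /tmp/tidb_cdc_test
  chmod +x ./tests/integration_tests/run_group.sh
  ./tests/integration_tests/run_group.sh kafka "$GROUP"

Invoked as ./bootstrap.sh G09 (name hypothetical), this reproduces the sequence of traced commands that each branch prints before its first test case runs.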
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=4bddac51-8d98-4dbd-b31d-6773eee4ef2d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-r9cq4 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-r9cq4 pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G10 Run cases: default_value simple cdc_server_tips event_filter sql_mode PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=c4f4f729-f931-4df8-8203-de5cb54161ee BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G10 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-5mh68 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-5mh68 pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: kafka... 
<<================= [Pipeline] sh [Pipeline] sh table ddl_manager.finish_mark not exists for 70-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G14 Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=64008926-64e5-4a76-b18d-26d661d27178 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6j6mj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-6j6mj pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G07 [Pipeline] sh Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=cdb09570-5208-4f1f-8864-06fef246f88c BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fchds GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m pingcap_tiflow_pull_cdc_integration_kafka_test_1853-fchds GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
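Note on the "The 1 times to try to start tidb cluster..." messages: they come from a retry wrapper around cluster startup, where the attempt counter increases and startup is retried if PD/TiKV/TiDB fail to come up. A rough sketch of that pattern, illustrative only; the helper name, flag, and retry limit below are assumptions, not taken from this log:

    # Illustrative retry wrapper; start_tidb_cluster, --workdir, and the limit of 3 are assumed.
    i=1
    while [ "$i" -le 3 ]; do
      echo "The ${i} times to try to start tidb cluster..."
      if start_tidb_cluster --workdir "$WORK_DIR"; then
        break
      fi
      i=$((i + 1))
    done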
+ rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=a7abb055-1613-4d32-8a57-b308e913a8c6 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-v3qw7 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792 pingcap_tiflow_pull_cdc_integration_kafka_test_1853-v3qw7 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... <<================= find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:07 CST 2024] <<<<<< run test case http_api_tls success! 
>>>>>> [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G11 Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=be2f07f0-2570-4c9f-82d0-67faefd5718d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-3jhbg GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-3jhbg pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... <<================= [Pipeline] // container [Pipeline] sh find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:08 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! 
>>>>>> table ddl_manager.finish_mark not exists for 71-th check, retry later [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G06 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=1d53eeec-b14d-4c44-a020-0b8ece264ad5 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9b5lh GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9b5lh pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
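Note on the repeated "find: '/tmp/tidb_cdc_test/*/*': No such file or directory" messages: they are harmless. Before the first case of a group has run, a sweep over the per-case work directories matches nothing, so the unexpanded glob is handed to find literally and find reports it as a missing path. A sketch of the kind of sweep that produces this message, illustrative only; the exact find arguments are not visible in this log:

    # Illustrative log sweep; with no per-case directories created yet, the glob does not expand,
    # find receives the literal pattern and prints "No such file or directory".
    find /tmp/tidb_cdc_test/*/* -type f -name "*.log"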
Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=70d53ae4-137a-4fe3-9d8d-92a43bfd3517 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-mpv2f GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l pingcap_tiflow_pull_cdc_integration_kafka_test_1853-mpv2f GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC4E800EA6495B < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 14:11:10 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 22:11:10 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>> Exiting on signal: INTERRUPT table ddl_manager.finish_mark not exists for 72-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/api_v2/run.sh using Sink-Type: kafka... <<================= start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
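Note on the curl exchange above (first "Connection refused" against 127.0.0.1:24927, later a 403 Forbidden from MinIO): it is a wait-for-endpoint probe. Any HTTP response, even a 403 on "/", proves the S3-compatible service is listening before the logbucket is created. A minimal sketch of such a probe, illustrative only; the retry budget and sleep interval are assumptions:

    # Illustrative wait for the MinIO endpoint seen in the log; "000" means curl could not connect.
    endpoint="http://127.0.0.1:24927/"
    for i in $(seq 1 30); do
      code=$(curl -s -o /dev/null -w '%{http_code}' "$endpoint" || true)
      if [ "$code" != "000" ]; then
        echo "MinIO answered with HTTP ${code}; endpoint is up"
        break
      fi
      sleep 1
    done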
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
check data failed 1-th time, retry later
check data successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sat May 4 22:11:09 CST 2024] <<<<<< run test case ddl_puller_lag success! >>>>>>
start tidb cluster in /tmp/tidb_cdc_test/tiflash
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
table ddl_manager.finish_mark not exists for 73-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<=================
kafka downstream isn't support syncpoint record
[Sat May 4 22:11:11 CST 2024] <<<<<< run test case syncpoint success! >>>>>>
start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
start tidb cluster in /tmp/tidb_cdc_test/row_format
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
The 1 times to try to start tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/sink_retry
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... The 1 times to try to start tidb cluster... Verifying downstream PD is started... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls_with_user_auth/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:13 CST 2024] <<<<<< run test case http_api_tls_with_user_auth success! >>>>>> Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:13 CST 2024] <<<<<< run test case consistent_replicate_nfs success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 74-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:15 CST 2024] <<<<<< run test case hang_sink_suicide success! 
>>>>>> Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/default_value Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 75-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
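Note on the "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines interleaved with "Verifying Upstream TiDB is started...": they are expected while the scripts poll the TiDB port until it accepts MySQL connections. A minimal sketch of that readiness check, illustrative only; the port, user, and retry budget are assumptions:

    # Illustrative readiness poll; ERROR 2003 simply means the server is not listening yet.
    for i in $(seq 1 60); do
      if mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; then
        echo "Verifying Upstream TiDB is started... done"
        break
      fi
      sleep 1
    done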
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_tls_with_auth/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:17 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:18 CST 2024] <<<<<< run test case server_config_compatibility success! >>>>>> table ddl_manager.finish_mark not exists for 76-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 9.39 secs (396922743 bytes/sec) [Pipeline] { [Pipeline] cache ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 77-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/cli_tls_with_auth Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:20 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_dup_error_restart/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:21 CST 2024] <<<<<< run test case changefeed_dup_error_restart success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_only_block_related_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/resolve_lock Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 78-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7eeb00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:1350, start at 2024-05-04 22:11:23.218622076 +0800 CST m=+5.702839842 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.227 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.230 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.230 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7ec500011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:1342, start at 2024-05-04 22:11:23.042477181 +0800 CST m=+5.120725113 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.048 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.028 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.028 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7ec500011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:1342, start at 2024-05-04 22:11:23.042477181 +0800 CST m=+5.120725113 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.048 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.028 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.028 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7edd80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:1431, start at 2024-05-04 22:11:23.159010622 +0800 CST m=+5.177603086 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.165 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.126 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.126 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/ddl_only_block_related_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7e1940021 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:1418, start at 2024-05-04 22:11:22.387227144 +0800 CST m=+6.249005359 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:22.397 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:22.391 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:22.391 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7e1940021 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:1418, start at 2024-05-04 22:11:22.387227144 +0800 CST m=+6.249005359 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:22.397 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:22.391 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:22.391 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7d2c00017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:1500, start at 2024-05-04 22:11:21.440919597 +0800 CST m=+5.245749569 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:21.447 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:21.442 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:21.442 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 79-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f264000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:1284, start at 2024-05-04 22:11:23.433110139 +0800 CST m=+5.269148775 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.440 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.417 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.417 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f264000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:1284, start at 2024-05-04 22:11:23.433110139 +0800 CST m=+5.269148775 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.440 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.417 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.417 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f3f40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:1365, start at 2024-05-04 22:11:23.538066462 +0800 CST m=+5.317895896 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.546 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-22:11:23.517 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.517 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f2a80009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:1408, start at 2024-05-04 22:11:23.444126711 +0800 CST m=+5.165164703 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.450 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.434 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.434 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f2a80009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:1408, start at 2024-05-04 22:11:23.444126711 +0800 CST m=+5.165164703 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.450 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.434 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.434 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7fd840003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:1496, start at 2024-05-04 22:11:24.131989374 +0800 CST m=+5.799101930 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:24.139 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:24.129 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:24.129 +0800 All versions after safe point can be accessed. (DO NOT EDIT) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7e1f00007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:1294, start at 2024-05-04 22:11:22.370055555 +0800 CST m=+5.465365034 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:22.376 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:22.364 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:22.364 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7e1f00007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:1294, start at 2024-05-04 22:11:22.370055555 +0800 CST m=+5.465365034 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:22.376 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:22.364 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:22.364 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7e3740009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:1375, start at 2024-05-04 22:11:22.469899418 +0800 CST m=+5.507386753 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:22.479 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:22.461 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:22.461 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7eeb00013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:1350, start at 2024-05-04 22:11:23.218622076 +0800 CST m=+5.702839842 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.227 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.230 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.230 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7f000000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:1438, start at 2024-05-04 22:11:23.278454914 +0800 CST m=+5.711535390 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.286 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.264 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.264 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
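The VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows above (bootstrapped, tikv_gc_leader_uuid, tikv_gc_safe_point, and so on) are TiDB's internal bookkeeping stored in the mysql.tidb table, which the log appears to dump once a server starts answering. A sketch of an equivalent query; the connection parameters are assumptions for illustration.

# Dump the same bootstrap/GC bookkeeping rows shown in the log from TiDB's internal mysql.tidb table.
# Host and port are illustrative, not the harness's actual values.
mysql -h 127.0.0.1 -P 4000 -u root -e \
  "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;"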
Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7eca40012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:1359, start at 2024-05-04 22:11:23.073871985 +0800 CST m=+5.290668071 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.081 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.049 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.049 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7eca40012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:1359, start at 2024-05-04 22:11:23.073871985 +0800 CST m=+5.290668071 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.081 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.049 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.049 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc7ed680015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:1444, start at 2024-05-04 22:11:23.135629475 +0800 CST m=+5.294986789 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:23.142 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:23.148 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:23.148 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
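Each "Verifying Upstream TiFlash is started..." line is paired with the proxy's log paths and an ArgMatches dump showing the proxy address 127.0.0.1:9000. A minimal sketch of one way to confirm that address is accepting connections; this is not the harness's actual check, and the timeout budget is an assumption.

# Wait until the TiFlash proxy address printed in the ArgMatches dump accepts TCP connections.
# The 30-second budget is an assumption for illustration.
for i in $(seq 1 30); do
  if nc -z 127.0.0.1 9000 2>/dev/null; then
    echo "TiFlash proxy is listening on 127.0.0.1:9000"
    break
  fi
  sleep 1
done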
Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC4E83C562B6A7 < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 14:11:25 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 22:11:26 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Exiting on signal: INTERRUPT [Sat May 4 22:11:26 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28272829.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Sat May 4 22:11:26 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28452847.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 80-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc81aec0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:1475, start at 2024-05-04 22:11:26.036647327 +0800 CST m=+5.133414577 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:26.043 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:26.011 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:26.011 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc81aec0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:1475, start at 2024-05-04 22:11:26.036647327 +0800 CST m=+5.133414577 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:26.043 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:26.011 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:26.011 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc81d400006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:1563, start at 2024-05-04 22:11:26.165612954 +0800 CST m=+5.205771722 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:26.174 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:26.160 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:26.160 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
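[annotation] Several of the cdc.test invocations above export GO_FAILPOINTS before starting the server; the variable maps a failpoint path to a term such as return(true), return(500), or N%return(true). A minimal sketch of launching one instance with a failpoint seen in this log (failpoint path and flags copied from the trace; file locations are assumptions):
    # enable the kv client reconnect failpoint for one cdc.test server instance (paths assumed)
    export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)'
    cdc.test -test.coverprofile=/tmp/cov.out server \
        --log-file /tmp/cdc.log --log-level debug \
        --data-dir /tmp/cdc_data --cluster-id default \
        --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &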
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2817.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
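[annotation] The "tso query" steps in this log capture a start timestamp from PD for the changefeed's --start-ts; because the coverage-instrumented binary appends a "PASS coverage: ..." footer, only the first field of the output is kept. A sketch of that extraction under the same assumptions (PD address from the trace, output paths illustrative):
    # query a start ts from PD and strip the 'PASS coverage: ...' footer that cdc.test appends
    tso=$(cdc.test -test.coverprofile=/tmp/cov.cli.out cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo "$tso" | awk -F ' ' '{print $1}' | head -n 1)
    echo "using start-ts=$start_ts"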
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2834.out cli tso query --pd=http://127.0.0.1:2379 [Sat May 4 22:11:27 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.28362838.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 5.63 secs (661955136 bytes/sec) [Pipeline] { [Pipeline] cache + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2850.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 81-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc838200005 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:1363, start at 2024-05-04 22:11:27.887496855 +0800 CST m=+5.626645134 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:27.894 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:27.880 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:27.880 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc838200005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:1363, start at 2024-05-04 22:11:27.887496855 +0800 CST m=+5.626645134 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:27.894 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:27.880 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:27.880 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449532890333577217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532890333577217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:29 CST 2024] <<<<<< START cdc server in tiflash case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.28422844.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 22:11:29 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29552957.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a7a6e751-a339-41da-a81a-b327267264f7 {"id":"a7a6e751-a339-41da-a81a-b327267264f7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831886} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cf50c7 a7a6e751-a339-41da-a81a-b327267264f7 /tidb/cdc/default/default/upstream/7365146818390518388 {"id":7365146818390518388,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a7a6e751-a339-41da-a81a-b327267264f7 {"id":"a7a6e751-a339-41da-a81a-b327267264f7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831886} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cf50c7 a7a6e751-a339-41da-a81a-b327267264f7 
/tidb/cdc/default/default/upstream/7365146818390518388 {"id":7365146818390518388,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a7a6e751-a339-41da-a81a-b327267264f7 {"id":"a7a6e751-a339-41da-a81a-b327267264f7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831886} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cf50c7 a7a6e751-a339-41da-a81a-b327267264f7 /tidb/cdc/default/default/upstream/7365146818390518388 {"id":7365146818390518388,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + set +x + tso='449532890390986753 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532890390986753 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:29 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28782880.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2906.out cli tso query --pd=http://127.0.0.1:2379 [Sat May 4 22:11:29 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>> + set +x + tso='449532890565312513 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532890565312513 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5542d847-89b2-443e-9041-f1b5295b3fb1 {"id":"5542d847-89b2-443e-9041-f1b5295b3fb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831887} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1ce31cd 5542d847-89b2-443e-9041-f1b5295b3fb1 /tidb/cdc/default/default/upstream/7365146815945053777 {"id":7365146815945053777,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5542d847-89b2-443e-9041-f1b5295b3fb1 {"id":"5542d847-89b2-443e-9041-f1b5295b3fb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831887} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1ce31cd 5542d847-89b2-443e-9041-f1b5295b3fb1 /tidb/cdc/default/default/upstream/7365146815945053777 {"id":7365146815945053777,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5542d847-89b2-443e-9041-f1b5295b3fb1 {"id":"5542d847-89b2-443e-9041-f1b5295b3fb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831887} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1ce31cd 5542d847-89b2-443e-9041-f1b5295b3fb1 /tidb/cdc/default/default/upstream/7365146815945053777 {"id":7365146815945053777,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:11:29 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>> 0 Verifying downstream PD is started... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:29 CST 2024] <<<<<< run test case consistent_partition_table success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 82-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc853680013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:1448, start at 2024-05-04 22:11:29.662042522 +0800 CST m=+7.318827587 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:29.671 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:29.675 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:29.675 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
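[annotation] The "table ddl_manager.finish_mark not exists for N-th check, retry later" lines above come from a polling loop that waits for a marker table to appear downstream before comparing data. A hedged sketch of such a loop (table name copied from the log; connection details and retry interval are assumptions):
    # wait until a marker table shows up downstream (connection details assumed, not from this log)
    check_table_exists() {
        local table=$1 host=$2 port=$3 max=$4
        for ((i = 1; i <= max; i++)); do
            if mysql -h "$host" -P "$port" -u root -e "DESC ${table};" >/dev/null 2>&1; then
                echo "table ${table} exists"; return 0
            fi
            echo "table ${table} not exists for ${i}-th check, retry later"
            sleep 2
        done
        return 1
    }
    check_table_exists ddl_manager.finish_mark 127.0.0.1 3306 100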
Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449532890843971585 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532890843971585 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:30 CST 2024] <<<<<< START cdc server in default_value case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.29442946.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ***************** properties ***************** "requestdistribution"="uniform" "readallfields"="true" "workload"="core" "mysql.port"="4000" "scanproportion"="0" "readproportion"="0" "mysql.user"="root" "threadcount"="2" "operationcount"="0" "insertproportion"="0" "mysql.db"="sink_retry" "updateproportion"="0" "mysql.host"="127.0.0.1" "dotransactions"="false" "recordcount"="10" ********************************************** Run finished, takes 8.968471ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2205.0, Avg(us): 1726, Min(us): 989, Max(us): 4372, 95th(us): 5000, 99th(us): 5000 [Sat May 4 22:11:31 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.29072909.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:30 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1e36f411-2107-4f3d-99ec-b5d76e757bfb {"id":"1e36f411-2107-4f3d-99ec-b5d76e757bfb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831888} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cdc0cb 1e36f411-2107-4f3d-99ec-b5d76e757bfb /tidb/cdc/default/default/upstream/7365146820937394814 {"id":7365146820937394814,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1e36f411-2107-4f3d-99ec-b5d76e757bfb {"id":"1e36f411-2107-4f3d-99ec-b5d76e757bfb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831888} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cdc0cb 1e36f411-2107-4f3d-99ec-b5d76e757bfb /tidb/cdc/default/default/upstream/7365146820937394814 {"id":7365146820937394814,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1e36f411-2107-4f3d-99ec-b5d76e757bfb {"id":"1e36f411-2107-4f3d-99ec-b5d76e757bfb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831888} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1cdc0cb 1e36f411-2107-4f3d-99ec-b5d76e757bfb /tidb/cdc/default/default/upstream/7365146820937394814 {"id":7365146820937394814,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:11:31 CST 2024] <<<<<< START kafka consumer in changefeed_finish case >>>>>> check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... 
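[annotation] The sink_retry case above first loads 10 rows with go-ycsb using the inlined properties printout, then starts cdc.test with a 25%-probability random-error failpoint on the MySQL sink. A sketch that writes the same workload to a file and runs the loader (property values copied from the printout; the file name and go-ycsb invocation are assumptions):
    # write the workload shown in the log to a file and load it into upstream TiDB (file name assumed)
    cat > /tmp/sink_retry_workload.properties <<'EOF'
    workload=core
    recordcount=10
    operationcount=0
    threadcount=2
    readallfields=true
    requestdistribution=uniform
    insertproportion=0
    readproportion=0
    updateproportion=0
    scanproportion=0
    dotransactions=false
    mysql.host=127.0.0.1
    mysql.port=4000
    mysql.user=root
    mysql.db=sink_retry
    EOF
    go-ycsb load mysql -P /tmp/sink_retry_workload.properties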
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 {"id":"6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d137d5 6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 /tidb/cdc/default/default/upstream/7365146815889707172 {"id":7365146815889707172,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 {"id":"6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d137d5 6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 /tidb/cdc/default/default/upstream/7365146815889707172 {"id":7365146815889707172,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 
{"id":"6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d137d5 6e63f7f0-01d3-4d9e-8061-e08a8c8c57c5 /tidb/cdc/default/default/upstream/7365146815889707172 {"id":7365146815889707172,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: b5caf14a-fa62-4984-9213-2bee4e4d835f Info: {"upstream_id":7365146815889707172,"namespace":"default","id":"b5caf14a-fa62-4984-9213-2bee4e4d835f","sink_uri":"kafka://127.0.0.1:9092/ticdc-tiflash-test-2525?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:32.325642966+08:00","start_ts":449532890333577217,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532890333577217,"checkpoint_ts":449532890333577217,"checkpoint_time":"2024-05-04 22:11:27.564"} [Sat May 4 22:11:32 CST 2024] <<<<<< START kafka consumer in tiflash case >>>>>> table ddl_manager.finish_mark not exists for 83-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e452eb6-837a-4de2-9b95-e5fb032a5ff4 {"id":"6e452eb6-837a-4de2-9b95-e5fb032a5ff4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d23460 6e452eb6-837a-4de2-9b95-e5fb032a5ff4 /tidb/cdc/default/default/upstream/7365146822457792924 {"id":7365146822457792924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e452eb6-837a-4de2-9b95-e5fb032a5ff4 {"id":"6e452eb6-837a-4de2-9b95-e5fb032a5ff4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d23460 6e452eb6-837a-4de2-9b95-e5fb032a5ff4 /tidb/cdc/default/default/upstream/7365146822457792924 {"id":7365146822457792924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e452eb6-837a-4de2-9b95-e5fb032a5ff4 {"id":"6e452eb6-837a-4de2-9b95-e5fb032a5ff4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d23460 6e452eb6-837a-4de2-9b95-e5fb032a5ff4 /tidb/cdc/default/default/upstream/7365146822457792924 {"id":7365146822457792924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53afecf0-f4e2-4f25-85c2-8b72af0aef97 {"id":"53afecf0-f4e2-4f25-85c2-8b72af0aef97","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d0b2d1 53afecf0-f4e2-4f25-85c2-8b72af0aef97 /tidb/cdc/default/default/upstream/7365146824976808363 {"id":7365146824976808363,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53afecf0-f4e2-4f25-85c2-8b72af0aef97 {"id":"53afecf0-f4e2-4f25-85c2-8b72af0aef97","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d0b2d1 53afecf0-f4e2-4f25-85c2-8b72af0aef97 /tidb/cdc/default/default/upstream/7365146824976808363 {"id":7365146824976808363,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53afecf0-f4e2-4f25-85c2-8b72af0aef97 {"id":"53afecf0-f4e2-4f25-85c2-8b72af0aef97","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831889} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d0b2d1 53afecf0-f4e2-4f25-85c2-8b72af0aef97 /tidb/cdc/default/default/upstream/7365146824976808363 {"id":7365146824976808363,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2936.out cli changefeed create --start-ts=449532890390986753 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-15549?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 18cb1665-cd04-4b2f-ab99-4d7170fb623b Info: {"upstream_id":7365146824976808363,"namespace":"default","id":"18cb1665-cd04-4b2f-ab99-4d7170fb623b","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-15549?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:32.794704371+08:00","start_ts":449532890390986753,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532890390986753,"checkpoint_ts":449532890390986753,"checkpoint_time":"2024-05-04 22:11:27.783"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
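[annotation] The changefeed printed above was created by cdc cli with a Kafka sink URI that carries the protocol, partition count, broker version, and message-size cap as query parameters. The equivalent command, with topic name and start-ts copied from this log and only the coverage-profile path assumed:
    # create an open-protocol changefeed against the local Kafka broker (values as printed above)
    cdc.test -test.coverprofile=/tmp/cov.cli.out cli changefeed create \
        --start-ts=449532890390986753 \
        '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-15549?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'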
[Sat May 4 22:11:32 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>> check_changefeed_state http://127.0.0.1:2379 3fd81196-68ad-4c9c-81a5-362462baecf3 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=3fd81196-68ad-4c9c-81a5-362462baecf3 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 3fd81196-68ad-4c9c-81a5-362462baecf3 -s + info='{ "upstream_id": 7365146822457792924, "namespace": "default", "id": "3fd81196-68ad-4c9c-81a5-362462baecf3", "state": "normal", "checkpoint_tso": 449532891631976454, "checkpoint_time": "2024-05-04 22:11:32.517", "error": null }' + echo '{ "upstream_id": 7365146822457792924, "namespace": "default", "id": "3fd81196-68ad-4c9c-81a5-362462baecf3", "state": "normal", "checkpoint_tso": 449532891631976454, "checkpoint_time": "2024-05-04 22:11:32.517", "error": null }' { "upstream_id": 7365146822457792924, "namespace": "default", "id": "3fd81196-68ad-4c9c-81a5-362462baecf3", "state": "normal", "checkpoint_tso": 449532891631976454, "checkpoint_time": "2024-05-04 22:11:32.517", "error": null } ++ echo '{' '"upstream_id":' 7365146822457792924, '"namespace":' '"default",' '"id":' '"3fd81196-68ad-4c9c-81a5-362462baecf3",' '"state":' '"normal",' '"checkpoint_tso":' 449532891631976454, '"checkpoint_time":' '"2024-05-04' '22:11:32.517",' '"error":' null '}' ++ jq -r .state ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146822457792924, '"namespace":' '"default",' '"id":' '"3fd81196-68ad-4c9c-81a5-362462baecf3",' '"state":' '"normal",' '"checkpoint_tso":' 449532891631976454, '"checkpoint_time":' '"2024-05-04' '22:11:32.517",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check diff failed 2-th time, retry later check diff failed 1-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2912.out cli tso query --pd=http://127.0.0.1:2379 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc883140013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:1484, start at 2024-05-04 22:11:32.708710253 +0800 CST m=+5.287226475 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.717 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:32.677 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.677 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 2-th time, retry later table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:34 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b290282e-9c16-40b3-b1b7-e480a497f381 {"id":"b290282e-9c16-40b3-b1b7-e480a497f381","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e0d0d1 b290282e-9c16-40b3-b1b7-e480a497f381 /tidb/cdc/default/default/upstream/7365146832948708628 {"id":7365146832948708628,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b290282e-9c16-40b3-b1b7-e480a497f381 {"id":"b290282e-9c16-40b3-b1b7-e480a497f381","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e0d0d1 b290282e-9c16-40b3-b1b7-e480a497f381 /tidb/cdc/default/default/upstream/7365146832948708628 {"id":7365146832948708628,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/b290282e-9c16-40b3-b1b7-e480a497f381 {"id":"b290282e-9c16-40b3-b1b7-e480a497f381","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e0d0d1 b290282e-9c16-40b3-b1b7-e480a497f381 /tidb/cdc/default/default/upstream/7365146832948708628 {"id":7365146832948708628,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.3000.out cli changefeed create --start-ts=449532890843971585 '--sink-uri=kafka://127.0.0.1:9092/ticdc-default-value-test-15348?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' table ddl_manager.finish_mark not exists for 84-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc878ac0005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:1809, start at 2024-05-04 22:11:32.014370712 +0800 CST m=+5.779399041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.021 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:32.011 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.011 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc878ac0005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:1809, start at 2024-05-04 22:11:32.014370712 +0800 CST m=+5.779399041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.021 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-22:11:32.011 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.011 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc879380014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:1896, start at 2024-05-04 22:11:32.077012245 +0800 CST m=+5.788395577 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.084 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:32.046 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.046 +0800 All versions after safe point can be accessed. (DO NOT EDIT) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:34 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5c12592a-8426-48e5-9896-4036231d670e {"id":"5c12592a-8426-48e5-9896-4036231d670e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d284f8 5c12592a-8426-48e5-9896-4036231d670e /tidb/cdc/default/default/upstream/7365146813534688651 {"id":7365146813534688651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5c12592a-8426-48e5-9896-4036231d670e {"id":"5c12592a-8426-48e5-9896-4036231d670e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d284f8 5c12592a-8426-48e5-9896-4036231d670e 
/tidb/cdc/default/default/upstream/7365146813534688651 {"id":7365146813534688651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5c12592a-8426-48e5-9896-4036231d670e {"id":"5c12592a-8426-48e5-9896-4036231d670e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831891} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1d284f8 5c12592a-8426-48e5-9896-4036231d670e /tidb/cdc/default/default/upstream/7365146813534688651 {"id":7365146813534688651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2968.out cli changefeed create --start-ts=449532890565312513 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28091?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 3134ef4d-aecd-43fd-affe-68d729e0c172 Info: {"upstream_id":7365146832948708628,"namespace":"default","id":"3134ef4d-aecd-43fd-affe-68d729e0c172","sink_uri":"kafka://127.0.0.1:9092/ticdc-default-value-test-15348?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:34.584020755+08:00","start_ts":449532890843971585,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532890843971585,"checkpoint_ts":449532890843971585,"checkpoint_time":"2024-05-04 22:11:29.511"} PASS coverage: 2.4% of statements in 
github.com/pingcap/tiflow/... Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x [Sat May 4 22:11:34 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>> + set +x + tso='449532891806040065 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532891806040065 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:34 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.29502952.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! 
ID: 4b052d47-3111-4cb1-a1aa-72a44208b952 Info: {"upstream_id":7365146813534688651,"namespace":"default","id":"4b052d47-3111-4cb1-a1aa-72a44208b952","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28091?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:34.640516165+08:00","start_ts":449532890565312513,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532890565312513,"checkpoint_ts":449532890565312513,"checkpoint_time":"2024-05-04 22:11:28.448"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc883140013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:1484, start at 2024-05-04 22:11:32.708710253 +0800 CST m=+5.287226475 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.717 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:32.677 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.677 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... 
VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc885440014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:1568, start at 2024-05-04 22:11:32.840934604 +0800 CST m=+5.359857729 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:32.848 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:32.817 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:32.817 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
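Note: the repeated "ERROR 2003 (HY000): Can't connect to MySQL server" lines above are readiness probes hitting the upstream/downstream TiDB before it accepts connections; once the server is up, the probe dumps the mysql.tidb bootstrap/GC variables seen in the tables. A minimal sketch of such a poll loop, assuming a mysql client on PATH; the function name, port 4000 and the retry budget are illustrative, not taken from the actual harness:
# Hypothetical readiness loop; the real test scripts may differ in detail.
check_tidb_ready() {
    local host=${1:-127.0.0.1} port=${2:-4000}
    local i
    for i in $(seq 1 60); do
        # succeeds only once TiDB is bootstrapped; failures surface as ERROR 2003 in the log
        if mysql -h"$host" -P"$port" -uroot \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' 2>/dev/null; then
            echo "TiDB on $host:$port is ready"
            return 0
        fi
        echo "TiDB not ready yet ($i-th check), retry later"
        sleep 1
    done
    return 1
}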
Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff successfully check_safepoint_forward http://127.0.0.1:2379 7365146815945053777 449532892070805505 449532890917634050 check diff failed 2-th time, retry later check diff successfully table cdc_tiflash_test.multi_data_type exists check diff failed 1-th time, retry later run task successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 22:11:36 CST 2024] <<<<<< START kafka consumer in default_value case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading golang.org/x/sync v0.7.0 go: downloading golang.org/x/time v0.5.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/sys v0.19.0 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda The 1 times to try to start tls tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/cli_tls_with_auth Starting TLS PD... 
check_changefeed_state http://127.0.0.1:2379 dcd941d5-d887-4312-92c3-df6a7e4d50ed stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=dcd941d5-d887-4312-92c3-df6a7e4d50ed + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c dcd941d5-d887-4312-92c3-df6a7e4d50ed -s go: downloading golang.org/x/text v0.14.0 Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc895b00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:7132, start at 2024-05-04 22:11:33.910778613 +0800 CST m=+5.274862761 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:33.918 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:33.918 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:33.918 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc895b00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:7132, start at 2024-05-04 22:11:33.910778613 +0800 CST m=+5.274862761 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:33.918 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:33.918 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:33.918 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0fc898a00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:7208, start at 2024-05-04 22:11:34.088259821 +0800 CST m=+5.401155274 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:34.098 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:34.056 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:34.056 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + info='{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' + echo '{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' { "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null } ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"stopped",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! 
stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"stopped",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .error.message table ddl_manager.finish_mark not exists for 85-th check, retry later Logging trace to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365146815945053777 + set +x [Sat May 4 22:11:36 CST 2024] <<<<<< START kafka consumer in sink_retry case >>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc) 3727130624 bytes in 6.65 secs (560148263 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container check diff successfully [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:37 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e20f59d5-e843-4ede-9076-c0f57e49450c {"id":"e20f59d5-e843-4ede-9076-c0f57e49450c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831895} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e01edd e20f59d5-e843-4ede-9076-c0f57e49450c /tidb/cdc/default/default/upstream/7365146830504113698 {"id":7365146830504113698,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e20f59d5-e843-4ede-9076-c0f57e49450c {"id":"e20f59d5-e843-4ede-9076-c0f57e49450c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831895} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e01edd e20f59d5-e843-4ede-9076-c0f57e49450c /tidb/cdc/default/default/upstream/7365146830504113698 {"id":7365146830504113698,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e20f59d5-e843-4ede-9076-c0f57e49450c {"id":"e20f59d5-e843-4ede-9076-c0f57e49450c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831895} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1e01edd e20f59d5-e843-4ede-9076-c0f57e49450c /tidb/cdc/default/default/upstream/7365146830504113698 {"id":7365146830504113698,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.3014.out cli changefeed create --start-ts=449532891806040065 '--sink-uri=kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-3242?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' check diff successfully Starting TLS TiKV... 
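Note: the curl probes above are the cdc server readiness check: poll http://127.0.0.1:8300/debug/info with basic auth (ticdc:ticdc_secret) until the response contains "etcd info", bailing out if it ever reports "failed to get info:". A sketch of that loop with the same 50-attempt / 3-second budget as the trace; the function name is illustrative:
wait_cdc_ready() {
    local url=http://127.0.0.1:8300/debug/info
    local i res
    for ((i = 0; i <= 50; i++)); do
        # the trace uses -vsL; -v is only for the verbose connection log shown above
        res=$(curl -sL --max-time 20 --user ticdc:ticdc_secret "$url" || true)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server reported an error" >&2
            return 1
        fi
        if echo "$res" | grep -q 'etcd info'; then
            echo "cdc server is ready"
            return 0
        fi
        sleep 3
    done
    return 1
}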
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2905.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] sh Create changefeed successfully! ID: 79f41144-b0ed-41a1-87f0-b5f8def84920 Info: {"upstream_id":7365146830504113698,"namespace":"default","id":"79f41144-b0ed-41a1-87f0-b5f8def84920","sink_uri":"kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-3242?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:38.215350214+08:00","start_ts":449532891806040065,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532891806040065,"checkpoint_ts":449532891806040065,"checkpoint_time":"2024-05-04 22:11:33.181"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/stretchr/testify v1.9.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/google/btree v1.1.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc 
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 wait process cdc.test exit for 1-th time... table ddl_manager.finish_mark not exists for 86-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 wait process cdc.test exit for 2-th time... 
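Note: the "Waiting for zookeeper/kafka/kafka-broker to be ready..." traces above are the broker readiness probe: check that the ZooKeeper and Kafka ports answer, then ask ZooKeeper's "dump" command whether broker id 1 has registered under /brokers/ids. A sketch of that probe; the retry loops are an assumption, since the trace only shows each check succeeding on the first try:
echo "Waiting for zookeeper to be ready..."
until nc -z localhost 2181; do sleep 1; done
echo "Waiting for kafka to be ready..."
until nc -z localhost 9092; do sleep 1; done
echo "Waiting for kafka-broker to be ready..."
# broker id 1 matches KAFKA_BROKER_ID in the pod spec
until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
    sleep 1
done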
go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/apache/thrift v0.16.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 [Sat May 4 22:11:38 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.86128614.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:11:39 CST 2024] <<<<<< run test case processor_err_chan success! >>>>>> + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 wait process cdc.test exit for 2-th time... go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 wait process cdc.test exit for 3-th time... Starting TLS TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying TLS TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449532893089497090 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532893089497090 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:39 CST 2024] <<<<<< START cdc server in resolve_lock case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.29432945.out server --log-file /tmp/tidb_cdc_test/resolve_lock/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resolve_lock/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 22:11:39 CST 2024] <<<<<< START kafka consumer in many_pk_or_uk case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:11:40 CST 2024] <<<<<< run test case tiflash success! >>>>>> run task successfully check_changefeed_state http://127.0.0.1:2379 dcd941d5-d887-4312-92c3-df6a7e4d50ed normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=dcd941d5-d887-4312-92c3-df6a7e4d50ed + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c dcd941d5-d887-4312-92c3-df6a7e4d50ed -s go: downloading golang.org/x/text v0.14.0 + info='{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "normal", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' + echo '{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "normal", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' { "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "normal", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null } ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"normal",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"normal",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7365146815945053777 449532892595355649 449532892595355650 table ddl_manager.finish_mark not exists for 87-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully check_changefeed_state http://127.0.0.1:2379 dcd941d5-d887-4312-92c3-df6a7e4d50ed stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=dcd941d5-d887-4312-92c3-df6a7e4d50ed + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c dcd941d5-d887-4312-92c3-df6a7e4d50ed -s VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc8e7b80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:1850, start at 2024-05-04 22:11:39.15284133 +0800 CST m=+5.253757523 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:39.161 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:39.167 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:39.167 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc8e7b80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:1850, start at 2024-05-04 22:11:39.15284133 +0800 CST m=+5.253757523 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:39.161 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:39.167 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:39.167 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc8e8740015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:1931, start at 2024-05-04 22:11:39.200430909 +0800 CST m=+5.244561616 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:39.207 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:39.165 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:39.165 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
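Note: the check_changefeed_state traces above (expected state "normal", then "stopped") follow one simple pattern: query the changefeed with `cdc cli changefeed query`, then compare the jq-extracted .state and .error.message against the expected values. A minimal bash sketch of that helper, reconstructed from the trace; the argument order, the https regex check and the jq fields follow the trace, while the TLS flag names and the failure messages are assumptions (the real script lives under tiflow/tests/integration_tests/_utils and may differ):

check_changefeed_state() {
    # args mirror the trace: endpoints, changefeed id, expected state, expected error, tls dir
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4 tls_dir=$5
    local query="cdc cli changefeed query --pd=$endpoints -c $changefeed_id -s"
    if [[ $endpoints =~ https ]]; then
        # TLS flag names here are assumptions; the trace above only shows the regex check
        query="$query --ca=$tls_dir/ca.pem --cert=$tls_dir/client.pem --key=$tls_dir/client-key.pem"
    fi
    info=$($query)
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ ! $state == "$expected_state" ]]; then
        echo "changefeed state $state does not match expected state $expected_state"
        exit 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! $message =~ $error_msg ]]; then
        echo "error message '$message' does not match expected '$error_msg'"
        exit 1
    fi
}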
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + info='{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' + echo '{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null }' { "upstream_id": 7365146815945053777, "namespace": "default", "id": "dcd941d5-d887-4312-92c3-df6a7e4d50ed", "state": "stopped", "checkpoint_tso": 449532892595355650, "checkpoint_time": "2024-05-04 22:11:36.192", "error": null } ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"stopped",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"dcd941d5-d887-4312-92c3-df6a7e4d50ed",' '"state":' '"stopped",' '"checkpoint_tso":' 449532892595355650, '"checkpoint_time":' '"2024-05-04' '22:11:36.192",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading 
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/kr/pretty v0.3.1 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/robfig/cron v1.2.0 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7bfb73cd-6e08-42a7-85ba-a80ee8212e2e {"id":"7bfb73cd-6e08-42a7-85ba-a80ee8212e2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e2cc 7bfb73cd-6e08-42a7-85ba-a80ee8212e2e /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7bfb73cd-6e08-42a7-85ba-a80ee8212e2e {"id":"7bfb73cd-6e08-42a7-85ba-a80ee8212e2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e2cc 7bfb73cd-6e08-42a7-85ba-a80ee8212e2e /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7bfb73cd-6e08-42a7-85ba-a80ee8212e2e {"id":"7bfb73cd-6e08-42a7-85ba-a80ee8212e2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e2cc 7bfb73cd-6e08-42a7-85ba-a80ee8212e2e /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.cli.8680.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-only-block-related-table check_changefeed_state http://127.0.0.1:2379 610760c9-4543-4a29-848e-d6c8319610f5 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=610760c9-4543-4a29-848e-d6c8319610f5 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 
610760c9-4543-4a29-848e-d6c8319610f5 -s + info='{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "610760c9-4543-4a29-848e-d6c8319610f5", "state": "normal", "checkpoint_tso": 449532894181064709, "checkpoint_time": "2024-05-04 22:11:42.241", "error": null }' + echo '{ "upstream_id": 7365146815945053777, "namespace": "default", "id": "610760c9-4543-4a29-848e-d6c8319610f5", "state": "normal", "checkpoint_tso": 449532894181064709, "checkpoint_time": "2024-05-04 22:11:42.241", "error": null }' { "upstream_id": 7365146815945053777, "namespace": "default", "id": "610760c9-4543-4a29-848e-d6c8319610f5", "state": "normal", "checkpoint_tso": 449532894181064709, "checkpoint_time": "2024-05-04 22:11:42.241", "error": null } ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"610760c9-4543-4a29-848e-d6c8319610f5",' '"state":' '"normal",' '"checkpoint_tso":' 449532894181064709, '"checkpoint_time":' '"2024-05-04' '22:11:42.241",' '"error":' null '}' ++ jq -r .state go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible Create changefeed successfully! 
ID: ddl-only-block-related-table Info: {"upstream_id":7365146867010850524,"namespace":"default","id":"ddl-only-block-related-table","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:42.541645108+08:00","start_ts":449532894227464194,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532894227464194,"checkpoint_ts":449532894227464194,"checkpoint_time":"2024-05-04 22:11:42.418"} PASS table ddl_manager.finish_mark not exists for 88-th check, retry later go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146815945053777, '"namespace":' '"default",' '"id":' '"610760c9-4543-4a29-848e-d6c8319610f5",' '"state":' '"normal",' '"checkpoint_tso":' 449532894181064709, '"checkpoint_time":' '"2024-05-04' '22:11:42.241",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365146815945053777 go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d7b97f58-cf45-4b6e-abca-90b210b46640 {"id":"d7b97f58-cf45-4b6e-abca-90b210b46640","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1f65ed5 d7b97f58-cf45-4b6e-abca-90b210b46640 /tidb/cdc/default/default/upstream/7365146860202399852 {"id":7365146860202399852,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d7b97f58-cf45-4b6e-abca-90b210b46640 {"id":"d7b97f58-cf45-4b6e-abca-90b210b46640","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1f65ed5 d7b97f58-cf45-4b6e-abca-90b210b46640 /tidb/cdc/default/default/upstream/7365146860202399852 {"id":7365146860202399852,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d7b97f58-cf45-4b6e-abca-90b210b46640 {"id":"d7b97f58-cf45-4b6e-abca-90b210b46640","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831899} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f1f65ed5 d7b97f58-cf45-4b6e-abca-90b210b46640 
/tidb/cdc/default/default/upstream/7365146860202399852 {"id":7365146860202399852,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2993.out cli changefeed create --start-ts=449532893089497090 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-23495?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 5978aeee-0fd7-4ffb-8ca9-17e5ffa58b68 Info: {"upstream_id":7365146860202399852,"namespace":"default","id":"5978aeee-0fd7-4ffb-8ca9-17e5ffa58b68","sink_uri":"kafka://127.0.0.1:9092/ticdc-resolve-lock-test-23495?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:43.137399947+08:00","start_ts":449532893089497090,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532893089497090,"checkpoint_ts":449532893089497090,"checkpoint_time":"2024-05-04 22:11:38.077"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
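Note: the repeated `curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret` probes above are the wait-until-ready loop that runs after each `cdc.test ... server` launch: it polls the /debug/info endpoint with basic auth until the response contains "etcd info", giving up after 50 attempts with a 3-second sleep between tries. A hedged sketch of that loop, reconstructed from the trace; the messages printed on the fail paths are placeholders, not taken from the real script:

get_info_fail_msg='failed to get info:'
etcd_info_msg='etcd info'
curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
i=0
while (( i <= 50 )); do
    res=$($curl_status_cmd)                     # empty while the port still refuses connections
    if echo "$res" | grep -q "$get_info_fail_msg"; then
        echo "server is up but reports: $res"   # placeholder handling; the trace only shows the grep
    fi
    if echo "$res" | grep -q "$etcd_info_msg"; then
        break                                   # owner / processors / etcd info served: server is ready
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server failed to start in time"   # placeholder failure path
        exit 1
    fi
    sleep 3
    (( i++ ))
done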
go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd + set +x [Sat May 4 22:11:44 CST 2024] <<<<<< START kafka consumer in ddl_only_block_related_table case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 89-th check, retry later table row_format.finish_mark not exists for 1-th check, retry later + set +x [Sat May 4 22:11:44 CST 2024] <<<<<< START kafka consumer in resolve_lock case >>>>>> go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading go.uber.org/zap v1.27.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b [Sat May 4 22:11:44 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.33513353.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/google/btree v1.1.2 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading golang.org/x/sync v0.7.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/common v0.52.2 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/prometheus/procfs v0.13.0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sys v0.19.0 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 table row_format.finish_mark not exists for 2-th check, retry later run task successfully go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 Changefeed remove successfully. ID: dcd941d5-d887-4312-92c3-df6a7e4d50ed CheckpointTs: 449532892595355650 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-14704?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_forward http://127.0.0.1:2379 7365146815945053777 449532892595355649 449532894181064709 449532892595355650 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc946880015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:3527, start at 2024-05-04 22:11:45.213351066 +0800 CST m=+5.252857779 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:45.222 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:45.186 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:45.186 +0800 All versions after safe point can be accessed. (DO NOT EDIT) + pd_host=127.0.0.1 + pd_port=2579 + is_tls=true + '[' true == true ']' ++ run_cdc_cli tso query --pd=https://127.0.0.1:2579 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3598.out cli tso query --pd=https://127.0.0.1:2579 table ddl_manager.finish_mark exists table ddl_only_block_related_table.finish_mark not exists for 1-th check, retry later check diff successfully go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd run task successfully wait process cdc.test exit for 1-th time... go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/golang/snappy v0.0.4 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading 
github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading golang.org/x/tools v0.20.0 go: downloading golang.org/x/time v0.5.0 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading k8s.io/api v0.28.6 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/apache/thrift v0.16.0 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: 
downloading github.com/kr/pretty v0.3.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/ncw/directio v1.0.5 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/tklauser/numcpus v0.6.1 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/go-logr/stdr v1.2.2 Changefeed remove successfully. ID: 610760c9-4543-4a29-848e-d6c8319610f5 CheckpointTs: 449532895216533507 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-14704?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_cleared http://127.0.0.1:2379 7365146815945053777 run task successfully go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading k8s.io/apimachinery v0.28.6 wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cbdc1426-6944-401f-ae65-61e1e3bf74b2 {"id":"cbdc1426-6944-401f-ae65-61e1e3bf74b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831904} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f20f85cf cbdc1426-6944-401f-ae65-61e1e3bf74b2 /tidb/cdc/default/default/upstream/7365146879895061184 {"id":7365146879895061184,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cbdc1426-6944-401f-ae65-61e1e3bf74b2 {"id":"cbdc1426-6944-401f-ae65-61e1e3bf74b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831904} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f20f85cf cbdc1426-6944-401f-ae65-61e1e3bf74b2 /tidb/cdc/default/default/upstream/7365146879895061184 {"id":7365146879895061184,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cbdc1426-6944-401f-ae65-61e1e3bf74b2 {"id":"cbdc1426-6944-401f-ae65-61e1e3bf74b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831904} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f20f85cf cbdc1426-6944-401f-ae65-61e1e3bf74b2 /tidb/cdc/default/default/upstream/7365146879895061184 {"id":7365146879895061184,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: b27b08f8-e4fc-4c9e-8750-5eca1c4d524f Info: {"upstream_id":7365146879895061184,"namespace":"default","id":"b27b08f8-e4fc-4c9e-8750-5eca1c4d524f","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T22:11:47.65043573+08:00","start_ts":449532894725537793,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532894725537793,"checkpoint_ts":449532894725537793,"checkpoint_time":"2024-05-04 22:11:44.318"} [Sat May 4 22:11:47 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 table row_format.finish_mark not exists for 3-th check, retry later go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit + set +x + tso='449532895280758786 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532895280758786 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 22:11:48 CST 2024] <<<<<< START cdc server in cli_tls_with_auth case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates ']' + curl_status_cmd='curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.36613663.out server --log-file /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_cli_tls_with_auth_tls1.log --log-level debug --data-dir /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_data_cli_tls_with_auth_tls1 --cluster-id default --config /tmp/tidb_cdc_test/cli_tls_with_auth/server.toml --ca /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem --cert-allowed-cn client --addr 127.0.0.1:8300 --pd https://127.0.0.1:2579 ++ curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 1-th time... [Sat May 4 22:11:48 CST 2024] <<<<<< run test case ddl_manager success! >>>>>> wait process cdc.test exit for 2-th time... table ddl_only_block_related_table.finish_mark not exists for 2-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:11:49 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc979e00005 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:2141, start at 2024-05-04 22:11:48.478489848 +0800 CST m=+5.550095299 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:48.486 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:48.472 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:48.472 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } [Pipeline] } [Pipeline] } table row_format.finish_mark not exists for 4-th check, retry later [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=87e2ce37-5f8a-418c-97da-28af642cf681 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9mrfb GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt pingcap_tiflow_pull_cdc_integration_kafka_test_1853-9mrfb GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9mrfb-0nnvt GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:51 CST 2024] <<<<<< skip test case clustered_index for kafka! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:51 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! >>>>>> table ddl_only_block_related_table.finish_mark not exists for 3-th check, retry later [Pipeline] sh VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc979e00005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:2141, start at 2024-05-04 22:11:48.478489848 +0800 CST m=+5.550095299 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:48.486 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:48.472 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:48.472 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fc97aa8000b Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:2197, start at 2024-05-04 22:11:48.534185095 +0800 CST m=+5.527975018 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:13:48.543 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:11:48.522 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:01:48.522 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
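The variable dumps above (bootstrapped, tidb_server_version and the tikv_gc_* rows) are the contents of TiDB's mysql.tidb system table, printed while the harness confirms that each upstream and downstream TiDB instance has come up; the interleaved "ERROR 2003" lines are earlier connection attempts that were refused. A minimal sketch of that kind of readiness probe, assuming the plain mysql client and the default 4000 port (the suite's actual helper script is not shown in this log):

    # Poll a TiDB instance until the bootstrap/GC variables are readable.
    # Host, port and the 60-attempt limit are assumptions for illustration.
    i=0
    until mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'; do
        i=$((i + 1))
        if [ "$i" -ge 60 ]; then
            echo 'TiDB did not become ready in time' >&2
            exit 1
        fi
        sleep 1
    done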
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G15 Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=34430319-fb28-47ab-a197-140e7b746860 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-jvsw5 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-jvsw5 pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
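Each shard invokes run_group.sh with the sink type and a group name (G15, G16 and G17 above), and the group expands into a list of case directories whose run.sh is executed in turn; the "Run cases:" lines are that expansion. The sketch below only illustrates the shape of the dispatch: the three case lists are copied from this log, while the argument handling and the way each run.sh consumes the sink type are assumptions, not the real run_group.sh.

    #!/usr/bin/env bash
    # Illustrative dispatch only; the real tests/integration_tests/run_group.sh
    # defines many more groups and extra bookkeeping.
    sink_type=$1   # e.g. kafka
    group=$2       # e.g. G15 / G16 / G17

    case "$group" in
        G15) cases="new_ci_collation batch_add_table multi_rocks" ;;
        G16) cases="owner_resign processor_etcd_worker_delay sink_hang" ;;
        G17) cases="clustered_index processor_resolved_ts_fallback" ;;
        *)   echo "unknown group: $group" >&2; exit 1 ;;
    esac

    echo "Run cases: $cases"
    for name in $cases; do
        bash "tests/integration_tests/${name}/run.sh" "$sink_type"
    done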
+ (( i++ )) + (( i <= 50 )) ++ curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Initializing NSS with certpath: sql:/etc/pki/nssdb * CAfile: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem CApath: none * NSS: client certificate from file * subject: CN=client * start date: Feb 18 07:48:00 2020 GMT * expire date: Jan 25 07:48:00 2120 GMT * common name: client * issuer: CN=My own CA,O=PingCAP,L=Beijing,ST=Beijing,C=CN * SSL connection using TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 * Server certificate: * subject: CN=tidb-server * start date: Feb 18 09:11:00 2020 GMT * expire date: Jan 25 09:11:00 2120 GMT * common name: tidb-server * issuer: CN=My own CA,O=PingCAP,L=Beijing,ST=Beijing,C=CN * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:51 GMT < Content-Length: 1233 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81e7c37f-a0ba-4a08-87f0-caa55f9a113d {"id":"81e7c37f-a0ba-4a08-87f0-caa55f9a113d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831908} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/1f5d8f43f2306083 81e7c37f-a0ba-4a08-87f0-caa55f9a113d /tidb/cdc/default/default/upstream/7365146918449229766 {"id":7365146918449229766,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81e7c37f-a0ba-4a08-87f0-caa55f9a113d {"id":"81e7c37f-a0ba-4a08-87f0-caa55f9a113d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831908} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/1f5d8f43f2306083 81e7c37f-a0ba-4a08-87f0-caa55f9a113d 
/tidb/cdc/default/default/upstream/7365146918449229766 {"id":7365146918449229766,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81e7c37f-a0ba-4a08-87f0-caa55f9a113d {"id":"81e7c37f-a0ba-4a08-87f0-caa55f9a113d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831908} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/1f5d8f43f2306083 81e7c37f-a0ba-4a08-87f0-caa55f9a113d /tidb/cdc/default/default/upstream/7365146918449229766 {"id":7365146918449229766,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}' + grep -q 'etcd info' + break + set +x + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=760730ca-f4a4-4d97-abc9-636483ce777c JENKINS_NODE_COOKIE=350191d5-df73-4ebe-bdb9-dbe0e4159e17 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786756543966875649","prowjobid":"760730ca-f4a4-4d97-abc9-636483ce777c","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"6a342866deda3271b067f649c64b771bbe3d2a00","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/6a342866deda3271b067f649c64b771bbe3d2a00","author_link":"https://github.com/lidezhu"}]}} 
KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1853 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786756543966875649 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1853/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1853-kzqfw GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g pingcap_tiflow_pull_cdc_integration_kafka_test_1853-kzqfw GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1853-kzqfw-6sn5g GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1853 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:51 CST 2024] <<<<<< run test case owner_resign success! 
>>>>>> table row_format.finish_mark not exists for 5-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/region_merge/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table ddl_only_block_related_table.finish_mark exists wait process 8617 exit for 1-th time... table row_format.finish_mark not exists for 6-th check, retry later wait process 8617 exit for 2-th time... [Sat May 4 22:11:53 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.35443546.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3719.out cli changefeed create --start-ts=449532895280758786 '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name [WARN] --tz is deprecated in changefeed settings. Create changefeed successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7365146918449229766,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:54.059497905+08:00","start_ts":449532895280758786,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532895280758786,"checkpoint_ts":449532895280758786,"checkpoint_time":"2024-05-04 22:11:46.436"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8617) - No such process wait process 8617 exit for 3-th time... process 8617 already exit [Sat May 4 22:11:54 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.87948796.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table kafka_big_messages.test exists check diff failed 1-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + set +x [Sat May 4 22:11:55 CST 2024] <<<<<< START kafka consumer in cli_tls_with_auth case >>>>>> table test.simple not exists for 1-th check, retry later table row_format.finish_mark not exists for 7-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/region_merge Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... check diff failed 2-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:56 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d88d9119-76c6-4e09-84b6-1533a74dde5d {"id":"d88d9119-76c6-4e09-84b6-1533a74dde5d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831913} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f230f2cd d88d9119-76c6-4e09-84b6-1533a74dde5d /tidb/cdc/default/default/upstream/7365146925796424316 {"id":7365146925796424316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d88d9119-76c6-4e09-84b6-1533a74dde5d {"id":"d88d9119-76c6-4e09-84b6-1533a74dde5d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831913} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f43f230f2cd d88d9119-76c6-4e09-84b6-1533a74dde5d /tidb/cdc/default/default/upstream/7365146925796424316 {"id":7365146925796424316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d88d9119-76c6-4e09-84b6-1533a74dde5d {"id":"d88d9119-76c6-4e09-84b6-1533a74dde5d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831913} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f230f2cd d88d9119-76c6-4e09-84b6-1533a74dde5d /tidb/cdc/default/default/upstream/7365146925796424316 {"id":7365146925796424316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: 1fed46a8-fe81-43c2-8ff4-404eecbf4dce Info: {"upstream_id":7365146925796424316,"namespace":"default","id":"1fed46a8-fe81-43c2-8ff4-404eecbf4dce","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T22:11:56.719630897+08:00","start_ts":449532897098989569,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532897098989569,"checkpoint_ts":449532897098989569,"checkpoint_time":"2024-05-04 22:11:53.372"} [Sat May 4 22:11:56 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>> Starting generate kafka big messages... 
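The repeated "+ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info ..." blocks above are the harness waiting for a freshly started cdc server: up to 50 attempts with a 3-second pause, a response containing "failed to get info:" treated as not ready, and a break once "etcd info" shows up in the owner/processor dump. A condensed sketch of that loop, where the function name and the handling of the failure marker are illustrative rather than the suite's exact helper:

    # Condensed readiness check modelled on the trace above; wait_cdc_ready is
    # an illustrative name, not a helper taken from the test suite.
    wait_cdc_ready() {
        local url='http://127.0.0.1:8300/debug/info'
        local i=0
        while [ "$i" -le 50 ]; do
            res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret)
            if echo "$res" | grep -q 'etcd info'; then
                return 0    # capture registered in etcd, server is ready
            fi
            echo "$res" | grep -q 'failed to get info:' && echo 'server up, info not ready yet'
            i=$((i + 1))
            sleep 3
        done
        echo 'cdc server did not become ready after 50 attempts' >&2
        return 1
    }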
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_reconstruct/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:11:57 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00318c240 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532897976123393 MinTableBarrierTs:449532897976123394 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30dca4af-47b6-4aa5-833b-0c96960fa907 {"id":"30dca4af-47b6-4aa5-833b-0c96960fa907","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e3a8 30dca4af-47b6-4aa5-833b-0c96960fa907 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449532897976123393,"min-table-barrier-ts":449532897976123394,"admin-job-type":0} /tidb/cdc/default/default/task/position/30dca4af-47b6-4aa5-833b-0c96960fa907/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00318c240 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532897976123393 MinTableBarrierTs:449532897976123394 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, 
resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30dca4af-47b6-4aa5-833b-0c96960fa907 {"id":"30dca4af-47b6-4aa5-833b-0c96960fa907","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e3a8 30dca4af-47b6-4aa5-833b-0c96960fa907 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table 
{"checkpoint-ts":449532897976123393,"min-table-barrier-ts":449532897976123394,"admin-job-type":0} /tidb/cdc/default/default/task/position/30dca4af-47b6-4aa5-833b-0c96960fa907/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00318c240 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532897976123393 MinTableBarrierTs:449532897976123394 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532897976123393, checkpointTs: 449532897976123393, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30dca4af-47b6-4aa5-833b-0c96960fa907 {"id":"30dca4af-47b6-4aa5-833b-0c96960fa907","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e3a8 30dca4af-47b6-4aa5-833b-0c96960fa907 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449532897976123393,"min-table-barrier-ts":449532897976123394,"admin-job-type":0} /tidb/cdc/default/default/task/position/30dca4af-47b6-4aa5-833b-0c96960fa907/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir check_ts_not_forward ddl-only-block-related-table [Pipeline] } [Pipeline] // withCredentials [Pipeline] } table test.simple not exists for 2-th check, retry later [Pipeline] // timeout table row_format.finish_mark not exists for 8-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:11:56 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! 
>>>>>> [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // podTemplate [Pipeline] } check diff successfully [Pipeline] // withEnv start tidb cluster in /tmp/tidb_cdc_test/changefeed_reconstruct Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // stage [Pipeline] } wait process cdc.test exit for 1-th time... 
table test.simple exists table test.`simple-dash` exists + endpoints=https://127.0.0.1:2579 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates + [[ https://127.0.0.1:2579 =~ https ]] ++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s + info='{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532895700189205, "checkpoint_time": "2024-05-04 22:11:48.036", "error": null }' + echo '{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532895700189205, "checkpoint_time": "2024-05-04 22:11:48.036", "error": null }' { "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532895700189205, "checkpoint_time": "2024-05-04 22:11:48.036", "error": null } ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532895700189205, '"checkpoint_time":' '"2024-05-04' '22:11:48.036",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532895700189205, '"checkpoint_time":' '"2024-05-04' '22:11:48.036",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] table row_format.finish_mark not exists for 9-th check, retry later wait process cdc.test exit for 2-th time... changefeed count 1 check pass, pd_addr: https://127.0.0.1:2579 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_source/run.sh using Sink-Type: kafka... <<================= cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:00 CST 2024] <<<<<< run test case kafka_big_messages success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Error: [CDC:ErrChangefeedUpdateRefused]changefeed update error: can only update changefeed config when it is stopped or failed update changefeed config should fail when changefeed is running, got Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc0019038a0} {Type:update Path:[Config SyncPointRetention] From: To:0xc0019038a8} {Type:update Path:[Config Consistent] From: To:0xc001264e70} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3982.out cli changefeed --changefeed-id custom-changefeed-name pause PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table row_format.finish_mark not exists for 10-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) The 1 times to try to start tidb cluster... run task failed 1-th time, retry later + set +x table kafka_big_messages.test exists check diff failed 1-th time, retry later table row_format.finish_mark exists check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 3-th time... check diff failed 2-th time, retry later check_ts_not_forward ddl-only-block-related-table cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:12:05 CST 2024] <<<<<< run test case row_format success! >>>>>> + endpoints=https://127.0.0.1:2579 + changefeed_id=custom-changefeed-name + expected_state=stopped + error_msg=null + tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates + [[ https://127.0.0.1:2579 =~ https ]] ++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_hang/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:12:06 CST 2024] <<<<<< run test case sink_hang success! >>>>>> + info='{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532899108323329, "checkpoint_time": "2024-05-04 22:12:01.037", "error": null }' + echo '{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532899108323329, "checkpoint_time": "2024-05-04 22:12:01.037", "error": null }' { "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532899108323329, "checkpoint_time": "2024-05-04 22:12:01.037", "error": null } ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449532899108323329, '"checkpoint_time":' '"2024-05-04' '22:12:01.037",' '"error":' null '}' ++ jq -r .state start tidb cluster in /tmp/tidb_cdc_test/multi_source Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449532899108323329, '"checkpoint_time":' '"2024-05-04' '22:12:01.037",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4067.out cli changefeed update --pd=https://127.0.0.1:2579 --config=/tmp/tidb_cdc_test/cli_tls_with_auth/changefeed.toml --no-confirm --changefeed-id custom-changefeed-name ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca83e00011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:1411, start at 2024-05-04 22:12:05.512082438 +0800 CST m=+5.042629314 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.518 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.496 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.496 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca83e00011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:1411, start at 2024-05-04 22:12:05.512082438 +0800 CST m=+5.042629314 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.518 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.496 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.496 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca85540015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:1487, start at 2024-05-04 22:12:05.631876611 +0800 CST m=+5.111617949 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.638 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.639 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.639 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc0019034d8} {Type:update Path:[Config SyncPointRetention] From: To:0xc0019035e8} {Type:update Path:[Config Consistent] From: To:0xc00121e380} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} Update changefeed config successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7365146918449229766,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:11:54.059497905+08:00","start_ts":449532895280758786,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":true,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":true,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":0,"checkpoint_ts":449532899108323329,"checkpoint_time":"2024-05-04 22:12:01.037"} PASS coverage: 2.8% of statements in github.com/pingcap/tiflow/... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff successfully wait process cdc.test exit for 1-th time... check diff failed 1-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:07 CST 2024] <<<<<< run test case kv_client_stream_reconnect success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x wait process cdc.test exit for 2-th time... Verifying downstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca89d40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:4296, start at 2024-05-04 22:12:05.893324589 +0800 CST m=+5.405935787 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.906 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.877 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.877 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca89d40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:4296, start at 2024-05-04 22:12:05.893324589 +0800 CST m=+5.405935787 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.906 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.877 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.877 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fca89840005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:4374, start at 2024-05-04 22:12:05.859843263 +0800 CST m=+5.326446278 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:05.866 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:05.857 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:05.857 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
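The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above are the contents of the mysql.tidb system table, dumped by the startup checks to confirm that each TiDB instance has bootstrapped and that its GC worker holds a lease. Assuming the default upstream port used by these tests, the same rows can be pulled by hand with:

  mysql -h 127.0.0.1 -P 4000 -u root -e \
    "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%'"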
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/region_merge/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/region_merge/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 22:12:08 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.28722874.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4106.out cli changefeed --changefeed-id custom-changefeed-name resume table sink_retry.finish_mark_1 exists check diff successfully ***************** properties ***************** "threadcount"="2" "requestdistribution"="uniform" "workload"="core" "mysql.user"="root" "readproportion"="0" "scanproportion"="0" "readallfields"="true" "insertproportion"="0" "recordcount"="10" "dotransactions"="false" "operationcount"="0" "mysql.db"="sink_retry" "mysql.host"="127.0.0.1" "updateproportion"="0" "mysql.port"="4000" ********************************************** Run finished, takes 4.813432ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2933.1, Avg(us): 833, Min(us): 537, Max(us): 2060, 95th(us): 3000, 99th(us): 3000 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:09 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! >>>>>> PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release \033[0;36m<<< Run all test success >>>\033[0m VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcac8180015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:4377, start at 2024-05-04 22:12:09.907890742 +0800 CST m=+5.947508170 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:09.915 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:09.913 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:09.913 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcac8180015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:4377, start at 2024-05-04 22:12:09.907890742 +0800 CST m=+5.947508170 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:09.915 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:09.913 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:09.913 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcac0440005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:4466, start at 2024-05-04 22:12:09.362918518 +0800 CST m=+5.345486649 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:09.371 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:09.361 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:09.361 +0800 All versions after safe point can be accessed. (DO NOT EDIT) [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // dir [Pipeline] } [Sat May 4 22:12:10 CST 2024] <<<<<< START cdc server in region_merge case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.region_merge.57095711.out server --log-file /tmp/tidb_cdc_test/region_merge/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/region_merge/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } + set +x [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Starting Upstream TiDB... 
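Each "<<<<<< START cdc server ..." block above follows the same pattern: launch cdc.test in server mode in the background, then poll http://127.0.0.1:8300/debug/info with basic auth until the response contains "etcd info", giving up after 50 attempts. A condensed sketch of that wait loop, assuming the endpoint and credentials shown in the trace:

  for i in $(seq 1 50); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
      # an explicit error from the server fails the test immediately
      echo "$res" | grep -q 'failed to get info:' && { echo "cdc server reported an error"; exit 1; }
      # once the capture is registered in etcd the server is considered ready
      echo "$res" | grep -q 'etcd info' && break
      [ "$i" -eq 50 ] && { echo "cdc server did not come up"; exit 1; }
      sleep 3
  done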
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:12 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e0c738e6-4abf-43da-a099-8a9680286a04 {"id":"e0c738e6-4abf-43da-a099-8a9680286a04","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2775acb e0c738e6-4abf-43da-a099-8a9680286a04 /tidb/cdc/default/default/upstream/7365146996501343562 {"id":7365146996501343562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e0c738e6-4abf-43da-a099-8a9680286a04 {"id":"e0c738e6-4abf-43da-a099-8a9680286a04","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2775acb e0c738e6-4abf-43da-a099-8a9680286a04 /tidb/cdc/default/default/upstream/7365146996501343562 {"id":7365146996501343562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e0c738e6-4abf-43da-a099-8a9680286a04 {"id":"e0c738e6-4abf-43da-a099-8a9680286a04","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2775acb e0c738e6-4abf-43da-a099-8a9680286a04 
/tidb/cdc/default/default/upstream/7365146996501343562 {"id":7365146996501343562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: cde7f3c0-de2a-4581-a67f-08a43e805754 Info: {"upstream_id":7365146996501343562,"namespace":"default","id":"cde7f3c0-de2a-4581-a67f-08a43e805754","sink_uri":"kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-260?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:12.250593179+08:00","start_ts":449532901168775169,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532901168775169,"checkpoint_ts":449532901168775169,"checkpoint_time":"2024-05-04 22:12:08.897"} [Sat May 4 22:12:12 CST 2024] <<<<<< START kafka consumer in new_ci_collation case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
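The sink URI in the changefeed created above encodes the Kafka settings the test consumer has to agree on: the per-case topic name, open-protocol encoding, 4 partitions, broker version 2.4.1 and a roughly 10 MiB message cap; the "START kafka consumer" line then launches the harness consumer that replays that topic into the downstream TiDB. The run script wraps the creation, but a hedged sketch of the equivalent cli call (topic name illustrative) is:

  SINK_URI="kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-260?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
  cdc cli changefeed create --sink-uri="$SINK_URI" --pd=http://127.0.0.1:2379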
[Sat May 4 22:12:12 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.58355837.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver1 --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:13 GMT < Content-Length: 859 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b79935c9-9c24-4dfc-a329-50842a389061 {"id":"b79935c9-9c24-4dfc-a329-50842a389061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831931} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f27d479f b79935c9-9c24-4dfc-a329-50842a389061 /tidb/cdc/default/default/upstream/7365147003213638992 {"id":7365147003213638992,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b79935c9-9c24-4dfc-a329-50842a389061 {"id":"b79935c9-9c24-4dfc-a329-50842a389061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831931} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f27d479f b79935c9-9c24-4dfc-a329-50842a389061 /tidb/cdc/default/default/upstream/7365147003213638992 {"id":7365147003213638992,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/b79935c9-9c24-4dfc-a329-50842a389061 {"id":"b79935c9-9c24-4dfc-a329-50842a389061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831931} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f27d479f b79935c9-9c24-4dfc-a329-50842a389061 /tidb/cdc/default/default/upstream/7365147003213638992 {"id":7365147003213638992,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Create changefeed successfully! ID: 71ed6ec4-3e90-41f8-9dc3-d8ec7d644563 Info: 
{"upstream_id":7365147003213638992,"namespace":"default","id":"71ed6ec4-3e90-41f8-9dc3-d8ec7d644563","sink_uri":"kafka://127.0.0.1:9092/ticdc-region-merge-test-27220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:14.108884839+08:00","start_ts":449532902487359495,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532902487359495,"checkpoint_ts":449532902487359495,"checkpoint_time":"2024-05-04 22:12:13.927"} [Sat May 4 22:12:14 CST 2024] <<<<<< START kafka consumer in region_merge case >>>>>> + endpoints=https://127.0.0.1:2579 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates + [[ https://127.0.0.1:2579 =~ https ]] ++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s + info='{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532902254051330, "checkpoint_time": "2024-05-04 22:12:13.037", "error": null }' + echo '{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532902254051330, "checkpoint_time": "2024-05-04 22:12:13.037", "error": null }' { "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532902254051330, "checkpoint_time": "2024-05-04 
22:12:13.037", "error": null } ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532902254051330, '"checkpoint_time":' '"2024-05-04' '22:12:13.037",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532902254051330, '"checkpoint_time":' '"2024-05-04' '22:12:13.037",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4199.out cli changefeed --changefeed-id custom-changefeed-name remove ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) split_and_random_merge scale: 20 start tidb cluster in /tmp/tidb_cdc_test/kafka_compression Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Changefeed remove successfully. ID: custom-changefeed-name CheckpointTs: 449532902516195331 SinkURI: kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ceac50f0-7932-4f16-9f9b-396da3f9ae34 {"id":"ceac50f0-7932-4f16-9f9b-396da3f9ae34","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831933} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870ac8 ceac50f0-7932-4f16-9f9b-396da3f9ae34 /tidb/cdc/default/default/upstream/7365147017480855735 {"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ceac50f0-7932-4f16-9f9b-396da3f9ae34 {"id":"ceac50f0-7932-4f16-9f9b-396da3f9ae34","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831933} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870ac8 ceac50f0-7932-4f16-9f9b-396da3f9ae34 /tidb/cdc/default/default/upstream/7365147017480855735 {"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ceac50f0-7932-4f16-9f9b-396da3f9ae34 {"id":"ceac50f0-7932-4f16-9f9b-396da3f9ae34","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831933} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870ac8 ceac50f0-7932-4f16-9f9b-396da3f9ae34 /tidb/cdc/default/default/upstream/7365147017480855735 {"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Starting Upstream TiDB... [Sat May 4 22:12:16 CST 2024] <<<<<< START kafka consumer in changefeed_reconstruct case >>>>>> Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ***************** properties ***************** "threadcount"="4" "readproportion"="0" "mysql.host"="127.0.0.1" "operationcount"="0" "insertproportion"="0" "mysql.user"="root" "readallfields"="true" "recordcount"="50" "scanproportion"="0" "updateproportion"="0" "requestdistribution"="uniform" "dotransactions"="false" "workload"="core" "mysql.db"="changefeed_reconstruct" "mysql.port"="4000" ********************************************** Run finished, takes 17.204999ms INSERT - Takes(s): 0.0, Count: 48, OPS: 3628.4, Avg(us): 1374, Min(us): 885, Max(us): 3933, 95th(us): 4000, 99th(us): 4000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x Verifying downstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_reconstruct.usertable not exists for 2-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb3a500013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:7535, start at 2024-05-04 22:12:17.218523101 +0800 CST m=+5.224363304 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:17.225 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:17.222 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:17.222 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table new_ci_collation_test.t1 exists table new_ci_collation_test.t2 exists table new_ci_collation_test.t3 exists table new_ci_collation_test.t4 not exists for 1-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
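The "***************** properties *****************" block is go-ycsb echoing its workload configuration before loading 50 rows into changefeed_reconstruct.usertable upstream; the replication check then waits for that table to appear downstream. A sketch of an equivalent invocation, assuming go-ycsb is on PATH and the same properties are passed on the command line:

  go-ycsb load mysql -p workload=core -p recordcount=50 -p threadcount=4 \
      -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
      -p mysql.db=changefeed_reconstruct \
      -p readproportion=0 -p updateproportion=0 -p scanproportion=0 \
      -p insertproportion=0 -p operationcount=0 -p dotransactions=false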
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release changefeed count 0 check pass, pd_addr: https://127.0.0.1:2579 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4281.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb3a500013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:7535, start at 2024-05-04 22:12:17.218523101 +0800 CST m=+5.224363304 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:17.225 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:17.222 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:17.222 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb3c900014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-c2q9n-lf6gc, pid:7621, start at 2024-05-04 22:12:17.343034023 +0800 CST m=+5.299068172 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:17.350 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:17.316 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:17.316 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
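The "changefeed count N check pass" lines come from a helper that counts the changefeeds registered on the PD endpoint after a remove or create; its implementation is not shown in this log, but one plausible form, using cdc cli changefeed list (with the same TLS flags as the earlier query calls) and jq, would be:

  expected=0
  tls_dir=/path/to/tiflow/tests/integration_tests/_certificates   # stand-in for the workspace path above
  actual=$(cdc cli changefeed list --pd=https://127.0.0.1:2579 \
      --ca=$tls_dir/ca.pem --cert=$tls_dir/client.pem --key=$tls_dir/client-key.pem | jq 'length')
  [ "$actual" -eq "$expected" ] || { echo "changefeed count $actual, expected $expected"; exit 1; }
  echo "changefeed count $expected check pass, pd_addr: https://127.0.0.1:2579"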
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_source/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_source/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [WARN] --tz is deprecated in changefeed settings. Create changefeed successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7365146918449229766,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-11699?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:20.519212631+08:00","start_ts":449532904167702531,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532904167702531,"checkpoint_ts":449532904167702531,"checkpoint_time":"2024-05-04 22:12:20.337"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_reconstruct.usertable exists check diff failed 1-th time, retry later table new_ci_collation_test.t4 exists table new_ci_collation_test.t5 not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... *************************** 1. row *************************** count(distinct region_id): 1 + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb7a640012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:4914, start at 2024-05-04 22:12:21.312246236 +0800 CST m=+5.294844439 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:21.319 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:21.324 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:21.324 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully table new_ci_collation_test.t5 not exists for 2-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8969.out cli tso query --pd=http://127.0.0.1:2379 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc/run.sh using Sink-Type: kafka... <<================= wait process 5840 exit for 1-th time... wait process 5840 exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process 5840 exit for 3-th time... + set +x + tso='449532904766701571 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532904766701571 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:12:24 CST 2024] <<<<<< START cdc server in multi_source case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.90049006.out server --log-file /tmp/tidb_cdc_test/multi_source/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_source/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb7a640012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:4914, start at 2024-05-04 22:12:21.312246236 +0800 CST m=+5.294844439 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:21.319 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:21.324 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:21.324 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcb7bf0000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:5004, start at 2024-05-04 22:12:21.384807916 +0800 CST m=+5.309628484 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:21.391 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:21.372 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:21.372 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
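(The repeated "ERROR 2003" lines followed by the mysql.tidb bootstrap rows above are the upstream/downstream TiDB readiness checks: the test keeps querying until the server accepts connections and the bootstrap metadata is readable. A sketch of that wait loop, assuming a plain mysql client; the host/port and the retry budget are assumptions, the queried table and columns mirror the output above.)
# Hypothetical readiness probe: ERROR 2003 means "not up yet", so retry.
for i in $(seq 1 60); do
    if mysql -h 127.0.0.1 -P 4000 -u root \
         -e "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;" 2>/dev/null; then
        echo "TiDB is started"
        break
    fi
    echo "TiDB not ready, retry ${i} ..."
    sleep 1
done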
Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5840) - No such process wait process 5840 exit for 4-th time... process 5840 already exit check_no_capture http://127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully [Sat May 4 22:12:24 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.61466148.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver2 --cluster-id default --addr 127.0.0.1:8300 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 table new_ci_collation_test.t5 exists check diff failed 1-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
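(The curl traces above, with "Connection refused" followed by "sleep 3", are the loop that waits for a freshly started cdc server to expose /debug/info. A condensed sketch of that loop; the 50-iteration budget, the 3-second sleep, the basic-auth credentials and the grep targets are taken from the trace, the error handling is a simplification.)
# Poll the cdc /debug/info endpoint until it reports "etcd info".
for (( i = 0; i <= 50; i++ )); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error"; exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"; break
    fi
    [ "$i" -eq 50 ] && { echo "cdc server did not become ready"; exit 1; }
    sleep 3
done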
+ endpoints=https://127.0.0.1:2579 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates + [[ https://127.0.0.1:2579 =~ https ]] ++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s + info='{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532905137635331, "checkpoint_time": "2024-05-04 22:12:24.037", "error": null }' + echo '{ "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532905137635331, "checkpoint_time": "2024-05-04 22:12:24.037", "error": null }' { "upstream_id": 7365146918449229766, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532905137635331, "checkpoint_time": "2024-05-04 22:12:24.037", "error": null } ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532905137635331, '"checkpoint_time":' '"2024-05-04' '22:12:24.037",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365146918449229766, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532905137635331, '"checkpoint_time":' '"2024-05-04' '22:12:24.037",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) split_and_random_merge scale: 40 The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... *************************** 1. 
row *************************** count(distinct region_id): 40 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4391.out cli changefeed create --start-ts=449532895280758786 '--sink-uri=kafka://127.0.0.1:9093/ticdc-cli-test-ssl-29791?protocol=open-protocol&ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem&cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem&key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true' --tz=Asia/Shanghai [Sat May 4 22:12:26 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.63426344.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [WARN] --tz is deprecated in changefeed settings. check diff failed 2-th time, retry later Create changefeed successfully! 
ID: 418dfd35-673c-4ac9-915e-fa693048fb90 Info: {"upstream_id":7365146918449229766,"namespace":"default","id":"418dfd35-673c-4ac9-915e-fa693048fb90","sink_uri":"kafka://127.0.0.1:9093/ticdc-cli-test-ssl-29791?protocol=open-protocol\u0026ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem\u0026cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem\u0026key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760\u0026insecure-skip-verify=true","create_time":"2024-05-04T22:12:27.195123806+08:00","start_ts":449532895280758786,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532895280758786,"checkpoint_ts":449532895280758786,"checkpoint_time":"2024-05-04 22:11:46.436"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:27 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b974ef4a-96b6-4466-b45e-564cb680199a {"id":"b974ef4a-96b6-4466-b45e-564cb680199a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2a45ed5 b974ef4a-96b6-4466-b45e-564cb680199a /tidb/cdc/default/default/upstream/7365147046302713964 {"id":7365147046302713964,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b974ef4a-96b6-4466-b45e-564cb680199a {"id":"b974ef4a-96b6-4466-b45e-564cb680199a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2a45ed5 b974ef4a-96b6-4466-b45e-564cb680199a /tidb/cdc/default/default/upstream/7365147046302713964 {"id":7365147046302713964,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b974ef4a-96b6-4466-b45e-564cb680199a {"id":"b974ef4a-96b6-4466-b45e-564cb680199a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2a45ed5 b974ef4a-96b6-4466-b45e-564cb680199a /tidb/cdc/default/default/upstream/7365147046302713964 {"id":7365147046302713964,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.9066.out cli changefeed create --start-ts=449532904766701571 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-source-test-20515?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0fcbcef00007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:5684, start at 2024-05-04 22:12:26.690273936 +0800 CST m=+5.472071516 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:26.700 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:26.684 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:26.684 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! ID: 08fad29b-1bce-40bb-9fd6-7d0e99d5ff66 Info: {"upstream_id":7365147046302713964,"namespace":"default","id":"08fad29b-1bce-40bb-9fd6-7d0e99d5ff66","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-source-test-20515?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:27.673532194+08:00","start_ts":449532904766701571,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532904766701571,"checkpoint_ts":449532904766701571,"checkpoint_time":"2024-05-04 22:12:22.622"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:27 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/194d71a7-9ba0-486d-8650-824e95dfd4ea {UpstreamID:7365147017480855735 Namespace:default ID:194d71a7-9ba0-486d-8650-824e95dfd4ea SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:12:15.990660011 +0800 CST StartTs:449532902981763077 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0035261b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532903020822532} {CheckpointTs:449532904607055875 MinTableBarrierTs:449532905917513731 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532904607055875, checkpointTs: 449532904607055875, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1446c06d-1c55-4aa7-88a0-61b6929b17a5 {"id":"1446c06d-1c55-4aa7-88a0-61b6929b17a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831945} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870b79 1446c06d-1c55-4aa7-88a0-61b6929b17a5 /tidb/cdc/default/default/changefeed/info/194d71a7-9ba0-486d-8650-824e95dfd4ea 
{"upstream-id":7365147017480855735,"namespace":"default","changefeed-id":"194d71a7-9ba0-486d-8650-824e95dfd4ea","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:12:15.990660011+08:00","start-ts":449532902981763077,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532903020822532} /tidb/cdc/default/default/changefeed/status/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":449532904607055875,"min-table-barrier-ts":449532905917513731,"admin-job-type":0} /tidb/cdc/default/default/task/position/1446c06d-1c55-4aa7-88a0-61b6929b17a5/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147017480855735 {"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/194d71a7-9ba0-486d-8650-824e95dfd4ea {UpstreamID:7365147017480855735 Namespace:default ID:194d71a7-9ba0-486d-8650-824e95dfd4ea SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:12:15.990660011 +0800 CST StartTs:449532902981763077 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0035261b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532903020822532} {CheckpointTs:449532904607055875 MinTableBarrierTs:449532905917513731 AdminJobType:noop} span: 
{table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532904607055875, checkpointTs: 449532904607055875, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1446c06d-1c55-4aa7-88a0-61b6929b17a5 {"id":"1446c06d-1c55-4aa7-88a0-61b6929b17a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831945} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870b79 1446c06d-1c55-4aa7-88a0-61b6929b17a5 /tidb/cdc/default/default/changefeed/info/194d71a7-9ba0-486d-8650-824e95dfd4ea {"upstream-id":7365147017480855735,"namespace":"default","changefeed-id":"194d71a7-9ba0-486d-8650-824e95dfd4ea","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:12:15.990660011+08:00","start-ts":449532902981763077,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532903020822532} /tidb/cdc/default/default/changefeed/status/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":449532904607055875,"min-table-barrier-ts":449532905917513731,"admin-job-type":0} /tidb/cdc/default/default/task/position/1446c06d-1c55-4aa7-88a0-61b6929b17a5/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147017480855735 
{"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/194d71a7-9ba0-486d-8650-824e95dfd4ea {UpstreamID:7365147017480855735 Namespace:default ID:194d71a7-9ba0-486d-8650-824e95dfd4ea SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:12:15.990660011 +0800 CST StartTs:449532902981763077 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0035261b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532903020822532} {CheckpointTs:449532904607055875 MinTableBarrierTs:449532905917513731 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532904607055875, checkpointTs: 449532904607055875, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1446c06d-1c55-4aa7-88a0-61b6929b17a5 {"id":"1446c06d-1c55-4aa7-88a0-61b6929b17a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831945} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2870b79 1446c06d-1c55-4aa7-88a0-61b6929b17a5 /tidb/cdc/default/default/changefeed/info/194d71a7-9ba0-486d-8650-824e95dfd4ea {"upstream-id":7365147017480855735,"namespace":"default","changefeed-id":"194d71a7-9ba0-486d-8650-824e95dfd4ea","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-11008?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:12:15.990660011+08:00","start-ts":449532902981763077,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span
":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532903020822532} /tidb/cdc/default/default/changefeed/status/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":449532904607055875,"min-table-barrier-ts":449532905917513731,"admin-job-type":0} /tidb/cdc/default/default/task/position/1446c06d-1c55-4aa7-88a0-61b6929b17a5/194d71a7-9ba0-486d-8650-824e95dfd4ea {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147017480855735 {"id":7365147017480855735,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1 | grep id Verifying downstream PD is started... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4429.out cli unsafe delete-service-gc-safepoint check diff failed 3-th time, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Confirm that you know what this command will do and use it at your own risk [Y/N] CDC service GC safepoint truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > "id": "1446c06d-1c55-4aa7-88a0-61b6929b17a5", "cluster-id": "default" run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcbcef00007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:5684, start at 2024-05-04 22:12:26.690273936 +0800 CST m=+5.472071516 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:26.700 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:26.684 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:26.684 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcbcfa80018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:5767, start at 2024-05-04 22:12:26.775139718 +0800 CST m=+5.500508358 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:26.784 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:26.780 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:26.780 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
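(The "cdc.test cli capture list ... | grep id" step a few lines up, answered by an "id"/"cluster-id" pair and "run task successfully", verifies that a capture re-registered after the previous cdc process was killed. A sketch of that assertion; the grep pipeline mirrors the log, while the jq extraction assumes the capture list is printed as a JSON array and is added only for illustration.)
# Check that at least one capture is registered, then optionally grab its id.
captures=$(cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1)
echo "$captures" | grep '"id"' || { echo "no capture registered"; exit 1; }
# Assumed JSON shape: a list of capture objects with an "id" field.
capture_id=$(echo "$captures" | jq -r '.[0].id')
echo "capture_id: ${capture_id}"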
Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x [Sat May 4 22:12:29 CST 2024] <<<<<< START kafka consumer in multi_source case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ccb295c7-6c62-42f0-8d64-2926b88cf93a {"id":"ccb295c7-6c62-42f0-8d64-2926b88cf93a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831946} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2b23dce ccb295c7-6c62-42f0-8d64-2926b88cf93a /tidb/cdc/default/default/upstream/7365147068251713696 
{"id":7365147068251713696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ccb295c7-6c62-42f0-8d64-2926b88cf93a {"id":"ccb295c7-6c62-42f0-8d64-2926b88cf93a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831946} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2b23dce ccb295c7-6c62-42f0-8d64-2926b88cf93a /tidb/cdc/default/default/upstream/7365147068251713696 {"id":7365147068251713696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ccb295c7-6c62-42f0-8d64-2926b88cf93a {"id":"ccb295c7-6c62-42f0-8d64-2926b88cf93a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831946} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2b23dce ccb295c7-6c62-42f0-8d64-2926b88cf93a /tidb/cdc/default/default/upstream/7365147068251713696 {"id":7365147068251713696,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x start tidb cluster in /tmp/tidb_cdc_test/cdc Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... go: downloading golang.org/x/text v0.14.0 [Sat May 4 22:12:29 CST 2024] <<<<<< START kafka consumer in changefeed_pause_resume case >>>>>> table changefeed_pause_resume.t1 not exists for 1-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4460.out cli unsafe reset --no-confirm --pd=https://127.0.0.1:2579 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release capture_id: 1446c06d-1c55-4aa7-88a0-61b6929b17a5 check_processor_table_count http://127.0.0.1:2379 194d71a7-9ba0-486d-8650-824e95dfd4ea 1446c06d-1c55-4aa7-88a0-61b6929b17a5 1 check diff failed 4-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully reset and all metadata truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check_processor_table_count http://127.0.0.1:2379 194d71a7-9ba0-486d-8650-824e95dfd4ea 1446c06d-1c55-4aa7-88a0-61b6929b17a5 0 table count 1 does equal to expected count 0 run task failed 1-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/spf13/pflag v1.0.5 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/google/btree v1.1.2 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading cloud.google.com/go v0.112.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/golang/snappy v0.0.4 go: 
downloading golang.org/x/time v0.5.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 split_and_random_merge scale: 80 go: downloading github.com/apache/thrift v0.16.0 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/rivo/uniseg v0.4.7 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading 
go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da [Sat May 4 22:12:31 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.72067208.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading go.opentelemetry.io/otel/metric v1.24.0 + set +x Starting Upstream TiDB... go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 5-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_processor_table_count http://127.0.0.1:2379 194d71a7-9ba0-486d-8650-824e95dfd4ea 1446c06d-1c55-4aa7-88a0-61b6929b17a5 0 run task successfully ***************** properties ***************** "mysql.host"="127.0.0.1" "operationcount"="0" "updateproportion"="0" "mysql.port"="4000" "readproportion"="0" "dotransactions"="false" "workload"="core" "readallfields"="true" "mysql.user"="root" "threadcount"="4" "requestdistribution"="uniform" "recordcount"="50" "insertproportion"="0" "mysql.db"="changefeed_reconstruct" "scanproportion"="0" ********************************************** table changefeed_pause_resume.t1 exists table changefeed_pause_resume.t2 not exists for 1-th check, retry later Run finished, takes 16.90139ms INSERT - Takes(s): 0.0, Count: 48, OPS: 3647.7, Avg(us): 1322, Min(us): 867, Max(us): 3744, 95th(us): 4000, 99th(us): 4000 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:34 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc {"id":"a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831951} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2c667cf a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc /tidb/cdc/default/default/upstream/7365147085772336007 {"id":7365147085772336007,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc {"id":"a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831951} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2c667cf a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc /tidb/cdc/default/default/upstream/7365147085772336007 
{"id":7365147085772336007,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc {"id":"a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831951} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2c667cf a9ea49ff-0c31-4c0b-8f03-0aa45434b3dc /tidb/cdc/default/default/upstream/7365147085772336007 {"id":7365147085772336007,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7267.out cli tso query --pd=http://127.0.0.1:2379 check diff successfully check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_pause_resume.t2 exists table changefeed_pause_resume.t3 not exists for 1-th check, retry later + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4572.out cli unsafe resolve-lock --region=8 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x + tso='449532908007325699 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532908007325699 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7311.out cli changefeed create --start-ts=449532908007325699 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd Create changefeed successfully! 
ID: gzip Info: {"upstream_id":7365147085772336007,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-05-04T22:12:36.930761402+08:00","start_ts":449532908007325699,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532908007325699,"checkpoint_ts":449532908007325699,"checkpoint_time":"2024-05-04 22:12:34.984"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc68600014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:5811, start at 2024-05-04 22:12:36.534820454 +0800 CST m=+5.529365395 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:36.547 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:36.555 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:36.555 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_pause_resume.t3 exists table changefeed_reconstruct.usertable exists + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4608.out cli unsafe resolve-lock --region=8 --ts=449532907365597187 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff successfully wait process cdc.test exit for 1-th time... + set +x *************************** 1. row *************************** count(distinct region_id): 4 wait process cdc.test exit for 2-th time... check diff successfully [Sat May 4 22:12:38 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 22:12:36.875 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:36.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:37.035 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:37.046 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:38.009 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:38.018 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 22:12:36.875 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:36.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:37.035 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:37.046 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:38.009 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 22:12:38.018 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]") table test.gzip_finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 3-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc7b8c000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:4444, start at 2024-05-04 22:12:37.746736889 +0800 CST m=+5.176743354 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:37.753 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:37.731 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:37.731 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc86500010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:4238, start at 2024-05-04 22:12:38.435654575 +0800 CST m=+5.272339502 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:38.444 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:38.420 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:38.420 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc68600014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:5811, start at 2024-05-04 22:12:36.534820454 +0800 CST m=+5.529365395 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:36.547 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:36.555 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:36.555 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc693c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:5893, start at 2024-05-04 22:12:36.603322313 +0800 CST m=+5.541567021 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:36.611 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:36.609 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:36.609 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... + set +x % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 218 100 212 100 6 1736 49 --:--:-- --:--:-- --:--:-- 1752 { "error_msg": "[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }", "error_code": "CDC:ErrAPIInvalidParam" cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:12:39 CST 2024] <<<<<< run test case new_ci_collation success! >>>>>> wait process cdc.test exit for 2-th time... 
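[annotation] The JSON error a few lines up ("[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }") is the cdc server rejecting a log-level request whose body is a bare JSON string; the unmarshal error itself shows that the handler expects an object with a "log_level" field. A minimal sketch of the two payload shapes in the same curl style the test uses; the endpoint path is not visible in this log, so /api/v1/log below is only an assumption for illustration:
  # endpoint path assumed for illustration; only the payload shapes are taken from the error above
  curl -s -X POST -d '"debug"'                http://127.0.0.1:8300/api/v1/log   # rejected: a bare string cannot unmarshal into struct { Level string `json:"log_level"` }
  curl -s -X POST -d '{"log_level": "debug"}' http://127.0.0.1:8300/api/v1/log   # accepted shape: an object carrying the log_level field named in the error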
cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:40 CST 2024] <<<<<< run test case changefeed_reconstruct success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc7b8c000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:4444, start at 2024-05-04 22:12:37.746736889 +0800 CST m=+5.176743354 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:37.753 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:37.731 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:37.731 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc7cfc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:4531, start at 2024-05-04 22:12:37.845232726 +0800 CST m=+5.221516819 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:37.853 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:37.823 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:37.823 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cdc/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.gzip_finish_mark not exists for 2-th check, retry later table sink_retry.finish_mark_2 exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:39 CST 2024] <<<<<< run test case sink_retry success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc86500010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:4238, start at 2024-05-04 22:12:38.435654575 +0800 CST m=+5.272339502 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:38.444 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-22:12:38.420 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:38.420 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcc87c40005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:4319, start at 2024-05-04 22:12:38.517495901 +0800 CST m=+5.297979038 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:14:38.524 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:12:38.513 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:02:38.513 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
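[annotation] The repeated "VARIABLE_NAME VARIABLE_VALUE COMMENT ... tikv_gc_*" dumps above are the test scripts confirming that each upstream/downstream TiDB has finished bootstrapping: once the "ERROR 2003" connection refusals stop, the bootstrap flag and GC worker rows become readable. A minimal sketch of that probe, assuming it simply selects from mysql.tidb on TiDB's default port 4000 (the exact query and port used by the scripts are not shown in this log):
  # poll until TiDB answers, then dump the bootstrap/GC metadata that appears in the log above
  while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'; do
      sleep 1    # until the server is up this prints the "ERROR 2003 (HY000): Can't connect" lines seen above
  done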
Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 2-th time, retry later [Sat May 4 22:12:41 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.73047306.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table region_merge.t1 exists } % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 181 100 181 0 0 1494 0 --:--:-- --:--:-- --:--:-- 1483 { "version": "v8.2.0-alpha-52-g6a342866d", "git_hash": "6a342866deda3271b067f649c64b771bbe3d2a00", "id": "ae1c868d-28eb-415c-9172-e4f2cf7622c2", "pid": 3666, "is_owner": true check diff failed 1-th time, retry later }wait process cdc.test exit for 1-th time... table test.gzip_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7430.out cli changefeed pause -c gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
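[annotation] The kafka_compression case reaches test.gzip_finish_mark despite the bash complaint from run.sh line 22 earlier in this log: the error token is the full set of matched "Kafka producer uses gzip compression algorithm" lines, which means whole grep matches (not a count) were handed to an arithmetic [[ ... ]] comparison. The exact expression on line 22 is not shown here, so the following is only a sketch of that failure mode and the usual fix:
  log=/tmp/tidb_cdc_test/kafka_compression/cdc.log
  # breaks: the substitution expands to the matching log lines, which are not a number,
  # so bash reports "syntax error: operand expected" with those lines as the error token
  [[ "$(grep 'uses gzip compression algorithm' "$log")" -ge 1 ]] && echo compression confirmed
  # works: grep -c yields a single integer that the arithmetic test can evaluate
  [[ "$(grep -c 'uses gzip compression algorithm' "$log")" -ge 1 ]] && echo compression confirmed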
[Sat May 4 22:12:42 CST 2024] <<<<<< START cdc server in cdc case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.59986000.out server --log-file /tmp/tidb_cdc_test/cdc/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5715.out cli tso query --pd=http://127.0.0.1:2379 wait process cdc.test exit for 2-th time... check diff failed 3-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:12:44 CST 2024] <<<<<< run test case cli_tls_with_auth success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7477.out cli changefeed remove -c gzip < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:44 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b1cb752-ab20-480b-bc3c-13e281a84086 {"id":"3b1cb752-ab20-480b-bc3c-13e281a84086","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831961} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2ec90d0 3b1cb752-ab20-480b-bc3c-13e281a84086 /tidb/cdc/default/default/upstream/7365147128483281608 {"id":7365147128483281608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b1cb752-ab20-480b-bc3c-13e281a84086 {"id":"3b1cb752-ab20-480b-bc3c-13e281a84086","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831961} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f43f2ec90d0 3b1cb752-ab20-480b-bc3c-13e281a84086 /tidb/cdc/default/default/upstream/7365147128483281608 {"id":7365147128483281608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b1cb752-ab20-480b-bc3c-13e281a84086 {"id":"3b1cb752-ab20-480b-bc3c-13e281a84086","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831961} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2ec90d0 3b1cb752-ab20-480b-bc3c-13e281a84086 /tidb/cdc/default/default/upstream/7365147128483281608 {"id":7365147128483281608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: test-normal Info: {"upstream_id":7365147128483281608,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-7156?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:44.786945669+08:00","start_ts":449532909703659521,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532909703659521,"checkpoint_ts":449532909703659521,"checkpoint_time":"2024-05-04 22:12:41.455"} Create changefeed successfully! 
ID: test-error-1 Info: {"upstream_id":7365147128483281608,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-12336?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:45.020361399+08:00","start_ts":449532909703659521,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532909703659521,"checkpoint_ts":449532909703659521,"checkpoint_time":"2024-05-04 22:12:41.455"} Create changefeed successfully! 
ID: test-error-2 Info: {"upstream_id":7365147128483281608,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-19313?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:45.215687278+08:00","start_ts":449532909703659521,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532909703659521,"checkpoint_ts":449532909703659521,"checkpoint_time":"2024-05-04 22:12:41.455"} [Sat May 4 22:12:45 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 22:12:45 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 22:12:45 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> check diff successfully wait process cdc.test exit for 1-th time... + set +x + tso='449532910336999425 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532910336999425 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:12:45 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.57525754.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Changefeed remove successfully. ID: gzip CheckpointTs: 449532908924829732 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff failed 4-th time, retry later wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:45 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/535a8b74-eb29-42c9-8ddb-813a5be2a8bd {"id":"535a8b74-eb29-42c9-8ddb-813a5be2a8bd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831963} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2fea3ca 535a8b74-eb29-42c9-8ddb-813a5be2a8bd /tidb/cdc/default/default/upstream/7365147139367587455 {"id":7365147139367587455,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/535a8b74-eb29-42c9-8ddb-813a5be2a8bd {"id":"535a8b74-eb29-42c9-8ddb-813a5be2a8bd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831963} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2fea3ca 535a8b74-eb29-42c9-8ddb-813a5be2a8bd /tidb/cdc/default/default/upstream/7365147139367587455 {"id":7365147139367587455,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/535a8b74-eb29-42c9-8ddb-813a5be2a8bd {"id":"535a8b74-eb29-42c9-8ddb-813a5be2a8bd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831963} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2fea3ca 535a8b74-eb29-42c9-8ddb-813a5be2a8bd /tidb/cdc/default/default/upstream/7365147139367587455 
{"id":7365147139367587455,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.cli.6050.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cdc-test-11498?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/cdc/pulsar_test.toml wait process cdc.test exit for 3-th time... Create changefeed successfully! ID: 085b5665-394d-4201-b5b2-8003c5492e66 Info: {"upstream_id":7365147139367587455,"namespace":"default","id":"085b5665-394d-4201-b5b2-8003c5492e66","sink_uri":"kafka://127.0.0.1:9092/ticdc-cdc-test-11498?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:46.422258072+08:00","start_ts":449532910968766469,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532910968766469,"checkpoint_ts":449532910968766469,"checkpoint_time":"2024-05-04 22:12:46.281"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7507.out cli tso query --pd=http://127.0.0.1:2379 cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:12:46 CST 2024] <<<<<< run test case region_merge success! 
>>>>>> + set +x [Sat May 4 22:12:47 CST 2024] <<<<<< START kafka consumer in cdc case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 check diff failed 5-th time, retry later go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:12:48 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fbd28638-7d17-4725-b2de-093ccf6a0680 {"id":"fbd28638-7d17-4725-b2de-093ccf6a0680","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831965} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2f219d6 fbd28638-7d17-4725-b2de-093ccf6a0680 /tidb/cdc/default/default/upstream/7365147142747607697 {"id":7365147142747607697,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fbd28638-7d17-4725-b2de-093ccf6a0680 {"id":"fbd28638-7d17-4725-b2de-093ccf6a0680","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831965} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2f219d6 fbd28638-7d17-4725-b2de-093ccf6a0680 /tidb/cdc/default/default/upstream/7365147142747607697 
{"id":7365147142747607697,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fbd28638-7d17-4725-b2de-093ccf6a0680 {"id":"fbd28638-7d17-4725-b2de-093ccf6a0680","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831965} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f2f219d6 fbd28638-7d17-4725-b2de-093ccf6a0680 /tidb/cdc/default/default/upstream/7365147142747607697 {"id":7365147142747607697,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5806.out cli changefeed create --start-ts=449532910336999425 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-17346?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' go: downloading golang.org/x/text v0.14.0 + set +x + tso='449532911192375298 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532911192375298 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7545.out cli changefeed create --start-ts=449532911192375298 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy Create changefeed successfully! 
ID: snappy Info: {"upstream_id":7365147085772336007,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-05-04T22:12:49.014187334+08:00","start_ts":449532911192375298,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532911192375298,"checkpoint_ts":449532911192375298,"checkpoint_time":"2024-05-04 22:12:47.134"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: 681dc99c-93a8-4256-8118-de97905e8742 Info: {"upstream_id":7365147142747607697,"namespace":"default","id":"681dc99c-93a8-4256-8118-de97905e8742","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-17346?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:12:48.905880075+08:00","start_ts":449532910336999425,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532910336999425,"checkpoint_ts":449532910336999425,"checkpoint_time":"2024-05-04 22:12:43.871"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
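The xtrace above (the repeated `curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info` calls inside a counted loop) is the harness waiting for a freshly started cdc server to become ready before it creates a changefeed. Below is a minimal sketch of that readiness loop; the endpoint, the basic-auth credentials and the 50-retry / 3-second budget are taken from the trace, everything else is simplified:

```bash
#!/usr/bin/env bash
# Minimal sketch of the readiness loop visible in the xtrace above.
endpoint="http://127.0.0.1:8300/debug/info"

for ((i = 0; i <= 50; i++)); do
    # Capture the endpoint body; an unreachable server just leaves res empty.
    res=$(curl -vsL --max-time 20 "$endpoint" --user ticdc:ticdc_secret) || true
    if echo "$res" | grep -q 'failed to get info:'; then
        : # server reachable but not serving metadata yet; keep waiting
    elif echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        break
    fi
    if ((i == 50)); then
        echo "cdc server failed to become ready" >&2
        exit 1
    fi
    sleep 3
done
```

The trace distinguishes the two grep patterns the same way: 'failed to get info:' keeps the loop waiting, and only 'etcd info' (the capture registered in etcd) breaks out of it.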
table multi_tables_ddl_test.t55 not exists for 1-th check, retry later check diff successfully + set +x [Sat May 4 22:12:50 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/google/btree v1.1.2 go: downloading golang.org/x/tools v0.20.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading 
github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/jfcg/sixb v1.3.8 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac + set +x [Sat May 4 22:12:50 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 22:12:48.977 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:49.010 +08:00] [INFO] [sarama.go:96] ["Kafka producer 
uses snappy compression algorithm"] [2024/05/04 22:12:49.110 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:49.120 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:50.109 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:50.118 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 22:12:48.977 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:49.010 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:49.110 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:49.120 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:50.109 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 22:12:50.118 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]") table test.snappy_finish_mark not exists for 1-th check, retry later go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 table multi_tables_ddl_test.t55 not exists for 2-th check, retry later go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 table drop_tables.c not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/batch_add_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
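The `kafka_compression/run.sh: line 22: [[: ... syntax error: operand expected` message above is a bash error: it typically appears when an arithmetic comparison inside `[[ ]]` receives a block of text (here, sarama producer log lines) where a number was expected. The actual contents of line 22 of run.sh are not shown in this log, so the following is only a hypothetical reproduction of that failure mode together with a safer counting pattern:

```bash
#!/usr/bin/env bash
# Hypothetical reproduction; the real run.sh line 22 is not visible in this log.
log_lines='[2024/05/04 22:12:48.977 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]'

# Failing pattern: a non-numeric operand in an arithmetic test inside [[ ]].
# Bash prints "[[: ...: syntax error: operand expected" and the test is false.
if [[ $log_lines -eq 6 ]]; then
    echo "never reached"
fi

# Safer pattern: reduce the log to a number first, then compare numbers.
count=$(printf '%s\n' "$log_lines" | grep -c 'uses snappy compression algorithm')
if [[ $count -ge 1 ]]; then
    echo "found $count producer log line(s) using snappy compression"
fi
```

This is consistent with the log above: the failed `[[ ]]` simply evaluates to false, the script keeps going, and the snappy case still finishes and passes.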
check diff failed 1-th time, retry later table multi_tables_ddl_test.t55 not exists for 3-th check, retry later table test.snappy_finish_mark not exists for 2-th check, retry later table drop_tables.c not exists for 2-th check, retry later check diff failed 2-th time, retry later go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 start tidb cluster in /tmp/tidb_cdc_test/batch_add_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table test.snappy_finish_mark not exists for 3-th check, retry later table multi_tables_ddl_test.t55 not exists for 4-th check, retry later go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd check diff failed 3-th time, retry later table drop_tables.c not exists for 3-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_error/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table multi_tables_ddl_test.t55 exists table multi_tables_ddl_test.t66 exists table multi_tables_ddl_test.t7 exists table multi_tables_ddl_test.t88 exists table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later table test.snappy_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7668.out cli changefeed pause -c snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table drop_tables.c not exists for 4-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/changefeed_error Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff failed 4-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_capture/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7703.out cli changefeed remove -c snappy table multi_tables_ddl_test.finish_mark not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/common_1/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Changefeed remove successfully. ID: snappy CheckpointTs: 449532913590992901 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy PASS start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table drop_tables.c not exists for 5-th check, retry later coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff failed 5-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_capture Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
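The surrounding traces show the shape of each kafka_compression sub-case: create a changefeed whose sink URI carries `compression=<codec>`, wait for a `test.<codec>_finish_mark` table to replicate, then pause and remove the changefeed before moving on (gzip, snappy and lz4 all appear in this log). A sketch of such a per-codec driver follows; the sink URI shape, changefeed id and finish-mark table name mirror the log, while `wait_table_exists` and the downstream host/port are hypothetical stand-ins for the harness's own checks:

```bash
#!/usr/bin/env bash
# Sketch of the per-codec flow suggested by the log; not the harness's actual script.
set -eu

PD_ADDR="http://127.0.0.1:2379"

wait_table_exists() {
    # Hypothetical replacement for the "table ... exists" poll in the log:
    # query the downstream database until the table shows up.
    local table=$1
    until mysql -h 127.0.0.1 -P 3306 -u root -e "DESC ${table}" >/dev/null 2>&1; do
        echo "table ${table} not exists yet, retry later"
        sleep 2
    done
}

run_compression_case() {
    local codec=$1
    local sink="kafka://127.0.0.1:9092/ticdc-kafka-compression-${codec}-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=${codec}"

    cdc cli changefeed create --pd="$PD_ADDR" --sink-uri="$sink" -c "$codec"
    wait_table_exists "test.${codec}_finish_mark"
    cdc cli changefeed pause  --pd="$PD_ADDR" -c "$codec"
    cdc cli changefeed remove --pd="$PD_ADDR" -c "$codec"
}

for codec in gzip snappy lz4; do
    run_compression_case "$codec"
done
```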
table multi_tables_ddl_test.finish_mark exists check table exists success + endpoints=http://127.0.0.1:2379 + changefeed_id=test-normal + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s + info='{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449532911538405397, "checkpoint_time": "2024-05-04 22:12:48.454", "error": null }' + echo '{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449532911538405397, "checkpoint_time": "2024-05-04 22:12:48.454", "error": null }' { "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449532911538405397, "checkpoint_time": "2024-05-04 22:12:48.454", "error": null } ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449532911538405397, '"checkpoint_time":' '"2024-05-04' '22:12:48.454",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449532911538405397, '"checkpoint_time":' '"2024-05-04' '22:12:48.454",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-1 + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7741.out cli tso query --pd=http://127.0.0.1:2379 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + info='{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449532914579275781, "checkpoint_time": "2024-05-04 22:13:00.054", "error": null }' + echo '{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449532914579275781, "checkpoint_time": "2024-05-04 22:13:00.054", "error": null }' { "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449532914579275781, "checkpoint_time": "2024-05-04 22:13:00.054", "error": null } ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449532914579275781, '"checkpoint_time":' '"2024-05-04' '22:13:00.054",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449532914579275781, '"checkpoint_time":' '"2024-05-04' '22:13:00.054",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-2 + expected_state=failed + error_msg=ErrSyncRenameTableFailed + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s table drop_tables.c exists check diff successfully + info='{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449532911027224618, "checkpoint_time": "2024-05-04 22:12:46.504", "error": { "time": "2024-05-04T22:12:49.099688067+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' + echo '{ "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449532911027224618, "checkpoint_time": "2024-05-04 22:12:46.504", "error": { "time": "2024-05-04T22:12:49.099688067+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." 
} }' { "upstream_id": 7365147128483281608, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449532911027224618, "checkpoint_time": "2024-05-04 22:12:46.504", "error": { "time": "2024-05-04T22:12:49.099688067+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449532911027224618, '"checkpoint_time":' '"2024-05-04' '22:12:46.504",' '"error":' '{' '"time":' '"2024-05-04T22:12:49.099688067+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365147128483281608, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449532911027224618, '"checkpoint_time":' '"2024-05-04' '22:12:46.504",' '"error":' '{' '"time":' '"2024-05-04T22:12:49.099688067+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.' + [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]] Verifying downstream PD is started... check diff successfully check diff successfully wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... start tidb cluster in /tmp/tidb_cdc_test/common_1 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... Verifying downstream PD is started... wait process cdc.test exit for 2-th time... 
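The block above is the multi_tables_ddl_v2 harness asserting each changefeed's final state: it queries the changefeed with `cdc cli changefeed query ... -s`, then compares `.state` and `.error.message` (via jq) against the expected values. Condensed into a reusable function, with the flag names and the simplified `-s` output taken from the trace and the rest a sketch:

```bash
#!/usr/bin/env bash
# Sketch of the state assertion performed in the trace above.
set -eu

check_changefeed_state() {
    local pd=$1 id=$2 expected_state=$3 expected_msg=$4
    local info state message

    info=$(cdc cli changefeed query --pd="$pd" -c "$id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)

    if [[ $state != "$expected_state" ]]; then
        echo "changefeed $id: state $state, expected $expected_state" >&2
        return 1
    fi
    if [[ ! $message =~ $expected_msg ]]; then
        echo "changefeed $id: error '$message' does not match '$expected_msg'" >&2
        return 1
    fi
}

# The three checks visible in the log:
check_changefeed_state http://127.0.0.1:2379 test-normal  normal null
check_changefeed_state http://127.0.0.1:2379 test-error-1 normal null
check_changefeed_state http://127.0.0.1:2379 test-error-2 failed ErrSyncRenameTableFailed
```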
+ set +x + tso='449532914888605703 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532914888605703 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7781.out cli changefeed create --start-ts=449532914888605703 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:02 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:03 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! >>>>>> Create changefeed successfully! 
ID: lz4 Info: {"upstream_id":7365147085772336007,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-05-04T22:13:03.171360761+08:00","start_ts":449532914888605703,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532914888605703,"checkpoint_ts":449532914888605703,"checkpoint_time":"2024-05-04 22:13:01.234"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 22:13:04 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 22:13:03.128 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.167 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.263 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.272 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:04.262 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:04.271 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 22:13:03.128 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.167 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.263 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:03.272 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:04.262 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 22:13:04.271 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]") table test.lz4_finish_mark not exists for 1-th check, retry later [2024/05/04 22:13:00.378 +08:00] [INFO] [main.go:99] ["running ddl test: 1 modifyColumnDefaultValueDDL2"] [2024/05/04 22:13:00.378 +08:00] [INFO] [main.go:99] ["running ddl test: 0 modifyColumnDefaultValueDDL1"] [2024/05/04 22:13:00.469 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs085e23d7_4a9d_4a96_b096_d37d72e96872"] [2024/05/04 22:13:00.489 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsaf0f5740_4032_4dee_9de8_3702a745cda7"] [2024/05/04 22:13:00.638 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs50950bfa_33d1_469c_b801_00bc3a433470"] [2024/05/04 22:13:00.753 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa5a4e52c_03f4_49eb_8b97_3ecd0155aede"] [2024/05/04 22:13:00.762 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:00.772 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:00.781 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:00.792 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:00.905 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb041ba3f_9f35_4438_baa5_d56ab8c58b64"] [2024/05/04 22:13:00.907 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs454b720e_66c5_4c25_8dff_84ca38f6adfc"] [2024/05/04 22:13:00.908 +08:00] [INFO] [main.go:835] ["running ddl test: 
testMultiDDLs6bdd5532_3d7c_4713_b4e9_8508c5de7c9b"] [2024/05/04 22:13:00.909 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs42a83cdc_1134_42ac_951b_a7f39307a8fc"] [2024/05/04 22:13:00.944 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:00.949 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:00.952 +08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/05/04 22:13:00.953 +08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/05/04 22:13:01.139 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/05/04 22:13:01.139 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/05/04 22:13:01.217 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:01.221 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:01.221 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:01.235 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:01.304 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:01.311 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:01.438 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:01.497 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:01.499 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:01.500 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:01.500 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:01.504 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:01.505 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:01.506 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:01.534 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:01.534 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/05/04 22:13:01.535 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:01.536 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/05/04 22:13:01.725 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:01.728 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:01.730 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:01.797 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:01.816 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:01.822 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:01.913 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/05/04 22:13:01.916 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/05/04 22:13:01.917 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/05/04 22:13:01.920 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/05/04 22:13:02.029 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:02.033 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:02.034 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:02.034 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:02.035 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:02.038 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:02.040 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:02.041 +08:00] [INFO] [main.go:178] ["72 insert 
success: 200"] [2024/05/04 22:13:02.118 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.118 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/05/04 22:13:02.121 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/05/04 22:13:02.121 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.296 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.306 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.311 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:02.319 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:02.339 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:02.405 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:02.540 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.598 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.599 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.605 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:02.619 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.623 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.640 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] [2024/05/04 22:13:02.641 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:02.642 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] [2024/05/04 22:13:02.645 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.647 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/05/04 22:13:02.648 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:02.695 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/05/04 22:13:02.713 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:02.799 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:02.821 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:02.822 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:02.827 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:02.842 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:02.918 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.122 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:03.127 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:03.128 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:03.132 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:03.132 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:03.137 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:03.216 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:03.229 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:03.231 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/05/04 22:13:03.235 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:03.245 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.245 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/05/04 22:13:03.399 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.401 +08:00] [INFO] [main.go:178] ["73 
insert success: 500"] [2024/05/04 22:13:03.410 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:03.421 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/05/04 22:13:03.424 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/05/04 22:13:03.425 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/05/04 22:13:03.428 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/05/04 22:13:03.432 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:03.448 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:03.449 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:03.631 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.697 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.700 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:03.702 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:03.703 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:03.704 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:03.739 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.805 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:03.810 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/05/04 22:13:03.814 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:03.818 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:03.823 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/05/04 22:13:03.924 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:03.925 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:03.933 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.013 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.027 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.027 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.210 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/05/04 22:13:04.211 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:04.214 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/05/04 22:13:04.221 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:04.223 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:04.225 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:04.225 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:04.230 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:04.333 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:04.341 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/05/04 22:13:04.345 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:04.346 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.400 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/05/04 22:13:04.406 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.448 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.505 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.521 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:04.530 +08:00] [INFO] [main.go:178] ["73 
insert success: 800"] [2024/05/04 22:13:04.543 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:04.610 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:04.724 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.796 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.797 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.799 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.801 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:04.808 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.918 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.922 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/05/04 22:13:04.928 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:04.929 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:04.933 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/05/04 22:13:04.935 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/05/04 22:13:04.937 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/05/04 22:13:04.939 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/05/04 22:13:04.941 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/05/04 22:13:04.942 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... [2024/05/04 22:13:05.026 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:05.040 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:05.098 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:05.104 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.122 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.141 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... [2024/05/04 22:13:05.235 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:05.316 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:05.321 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:05.325 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:05.326 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:05.327 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:05.442 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
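The repeated "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines above come from the harness probing the just-launched TiDB servers before they are accepting connections. A minimal sketch of such a readiness loop, assuming a mysql client on PATH; the host, port, and retry budget here are illustrative, not the actual values used by the test scripts:

    i=0
    while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1;' >/dev/null 2>&1; do
        i=$((i + 1))
        if [ "$i" -gt 60 ]; then
            echo 'Failed to verify upstream TiDB within the retry budget'
            exit 1
        fi
        # ERROR 2003 just means nothing is listening on the port yet
        sleep 1
    done
    echo 'Verifying Upstream TiDB is started... done'

Each ERROR 2003 line in the log corresponds to one failed probe iteration of a loop like this.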
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:05.503 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/05/04 22:13:05.506 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:05.508 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/05/04 22:13:05.510 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.520 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:05.607 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.626 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:05.631 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:05.637 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:05.710 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:05.716 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] [2024/05/04 22:13:05.716 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce28c80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:4477, start at 2024-05-04 22:13:05.234763924 +0800 CST m=+5.391078610 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:05.242 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:05.202 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:05.202 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:05.732 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:05.815 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:05.843 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.902 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.903 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:05.905 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:05.906 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:06.024 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:06.033 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/05/04 22:13:06.036 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/05/04 22:13:06.038 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.099 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:06.111 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.139 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.206 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.214 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:06.228 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] check diff failed 2-th time, retry later [2024/05/04 22:13:06.242 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:06.315 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:06.346 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.416 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.431 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.434 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.435 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:06.440 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.501 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/05/04 22:13:06.505 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/05/04 22:13:06.505 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/05/04 22:13:06.510 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/05/04 22:13:06.599 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.605 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/05/04 22:13:06.621 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/05/04 22:13:06.624 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:06.629 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:06.638 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:06.715 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:06.734 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 22:13:06.744 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:06.804 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:06.816 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:06.853 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:06.918 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:06.934 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:06.942 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:06.998 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:07.003 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:07.030 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:07.123 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] [2024/05/04 22:13:07.135 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:07.140 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] [2024/05/04 22:13:07.140 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.228 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:07.236 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.239 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.242 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] table test.lz4_finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:07.298 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] [2024/05/04 22:13:07.338 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.341 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:07.399 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:07.400 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:07.443 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
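The "table test.lz4_finish_mark not exists for 2-th check, retry later" lines above are a polling helper waiting for the changefeed to replicate a marker table to the downstream cluster. A hedged sketch of that kind of check; the downstream address, port, and retry count are assumptions for illustration, not values taken from this log:

    check_table_exists() {
        # succeeds once the marker table is visible on the downstream side
        mysql -h 127.0.0.1 -P 3306 -u root -e "DESC ${1};" >/dev/null 2>&1
    }

    for i in $(seq 1 60); do
        if check_table_exists "test.lz4_finish_mark"; then
            echo "table test.lz4_finish_mark exists"
            break
        fi
        echo "table test.lz4_finish_mark not exists for ${i}-th check, retry later"
        sleep 2
    done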
[2024/05/04 22:13:07.510 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.511 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.520 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.530 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.536 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:07.626 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.702 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] [2024/05/04 22:13:07.720 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:07.721 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:07.798 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.896 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:07.896 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:07.898 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:07.996 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.005 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] check diff failed 3-th time, retry later [2024/05/04 22:13:08.007 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.020 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.109 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.112 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/05/04 22:13:08.116 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/05/04 22:13:08.117 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/05/04 22:13:08.121 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/05/04 22:13:08.125 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:08.131 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:08.137 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:08.142 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:08.205 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:08.238 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] table test.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 2-th check, retry later table test.finish_mark not exists for 3-th check, retry later table test.finish_mark not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce28c80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:4477, start at 2024-05-04 22:13:05.234763924 +0800 CST m=+5.391078610 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:05.242 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:05.202 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:05.202 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce2a4c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:4557, start at 2024-05-04 22:13:05.323362755 +0800 CST m=+5.429107286 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:05.331 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:05.299 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:05.299 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
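The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dump above (bootstrapped, tidb_server_version, system_tz, and the tikv_gc_* rows) is the contents of TiDB's mysql.tidb bootstrap table, printed while verifying that each TiDB instance has finished bootstrapping. A query like the following reproduces that dump; the connection details are illustrative:

    mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'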
Logging trace to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:08.308 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] [2024/05/04 22:13:08.325 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.330 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] [2024/05/04 22:13:08.407 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:08.504 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.505 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.506 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
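The "check diff failed N-th time, retry later" lines scattered through this section wrap a data-consistency comparison between the upstream and downstream clusters; the integration tests typically drive sync_diff_inspector in a retry loop roughly like the sketch below. The binary invocation and config path are placeholders, not values recorded in this log:

    for i in $(seq 1 30); do
        if sync_diff_inspector --config=./diff_config.toml >/dev/null 2>&1; then
            echo 'check diff successfully'
            break
        fi
        echo "check diff failed ${i}-th time, retry later"
        sleep 2
    done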
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:08.602 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:08.609 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:08.609 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.621 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:08.709 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:08.719 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.724 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.731 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.736 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.740 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:08.818 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:08.838 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"] [2024/05/04 22:13:08.904 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:08.904 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/05/04 22:13:08.907 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/05/04 22:13:08.930 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:09.026 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:09.033 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:09.034 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:09.131 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.135 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.135 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:09.199 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.240 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:09.242 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] table test.lz4_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7926.out cli changefeed pause -c lz4 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce58440009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:4299, start at 2024-05-04 22:13:08.250554283 +0800 CST m=+5.319050370 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:08.257 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:08.241 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:08.241 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:09.303 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:09.303 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:09.305 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:09.330 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:09.354 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"] [2024/05/04 22:13:09.406 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.429 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:09.518 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.522 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:09.526 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 5-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:09.598 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"] [2024/05/04 22:13:09.602 +08:00] [INFO] [main.go:199] ["0 delete success: 600"] [2024/05/04 22:13:09.626 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:09.634 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:09.636 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.700 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:09.736 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:09.740 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:09.796 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.816 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.818 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.820 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:09.845 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:09.911 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"] [2024/05/04 22:13:09.925 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:09.942 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:10.026 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.028 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] check diff failed 4-th time, retry later [2024/05/04 22:13:10.039 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:10.138 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.198 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.199 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.247 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 22:13:10.302 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:10.304 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.306 +08:00] [INFO] [main.go:178] ["0 insert success: 1300"] [2024/05/04 22:13:10.327 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 
22:13:10.330 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:10.331 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:10.357 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.405 +08:00] [INFO] [main.go:178] ["1 insert success: 1800"] [2024/05/04 22:13:10.433 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.467 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:10.476 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.545 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:10.632 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.638 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 22:13:10.655 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.658 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.658 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:10.718 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 22:13:10.742 +08:00] [INFO] [main.go:178] ["1 insert success: 1900"] [2024/05/04 22:13:10.753 +08:00] [INFO] [main.go:178] ["0 insert success: 1400"] [2024/05/04 22:13:10.757 +08:00] [INFO] [main.go:199] ["0 delete success: 700"] [2024/05/04 22:13:10.778 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7957.out cli changefeed remove -c lz4 [2024/05/04 22:13:10.811 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 22:13:10.927 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 22:13:10.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 22:13:10.949 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 22:13:10.955 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 22:13:10.957 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 22:13:11.023 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 22:13:11.048 +08:00] [INFO] [main.go:178] ["1 insert success: 2000"] [2024/05/04 22:13:11.077 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 22:13:11.142 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce58440009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:4299, start at 2024-05-04 22:13:08.250554283 +0800 CST m=+5.319050370 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:08.257 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-22:13:08.241 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:08.241 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce592c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:4375, start at 2024-05-04 22:13:08.321231646 +0800 CST m=+5.336697759 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:08.327 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:08.299 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:08.299 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
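Around this point the kafka_compression case pauses and then removes its lz4 changefeed (the "cli changefeed pause -c lz4" and "cli changefeed remove -c lz4" invocations above, confirmed by the "Changefeed remove successfully" message further below). Stripped of the test binary's coverage-profile wrapper, the equivalent commands are roughly:

    # coverage-profile wrapper omitted; the PD endpoint matches the one used elsewhere in this log
    cdc cli changefeed pause  -c lz4 --pd=http://127.0.0.1:2379
    cdc cli changefeed remove -c lz4 --pd=http://127.0.0.1:2379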
Logging trace to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:11.355 +08:00] [INFO] [main.go:178] ["0 insert success: 1500"] [2024/05/04 22:13:11.534 +08:00] [INFO] [main.go:178] ["1 insert success: 2100"] [2024/05/04 22:13:11.559 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs55ef00b9_7c1b_43c7_81e5_49c29b929844"] table test.finish_mark not exists for 6-th check, retry later [Sat May 4 22:13:11 CST 2024] <<<<<< START cdc server in batch_add_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.59025904.out server --log-file /tmp/tidb_cdc_test/batch_add_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/batch_add_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce7c4c000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:5763, start at 2024-05-04 22:13:10.558655296 +0800 CST m=+5.283273870 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:10.565 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:10.547 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:10.547 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:11.561 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc8e33a5b_135f_4526_b10b_c7a031fe8c1b"] [2024/05/04 22:13:11.630 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs2ec7cae1_1dc4_40c1_8f3b_20fdb4115fc2"] [2024/05/04 22:13:11.635 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc9800db0_d945_4ce3_9073_ecc48ee01d46"] [2024/05/04 22:13:11.638 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1488e2a8_51af_4ec8_90db_e83bc3c7761b"] [2024/05/04 22:13:11.646 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb768352f_853f_411d_a674_da74220c4844"] [2024/05/04 22:13:11.742 +08:00] [INFO] [main.go:178] ["0 insert success: 1600"] [2024/05/04 22:13:11.746 +08:00] [INFO] [main.go:199] ["0 delete success: 800"] Changefeed remove successfully. ID: lz4 CheckpointTs: 449532915806109702 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:11.910 +08:00] [INFO] [main.go:178] ["1 insert success: 2200"] [2024/05/04 22:13:11.915 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs5971c2af_79c1_4e3b_ab44_b6daad3b850b"] [2024/05/04 22:13:11.930 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:11.930 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:11.939 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:11.940 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:11.949 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsfca8cce3_e823_4a4b_a4fb_5481b4b875d0"] [2024/05/04 22:13:12.039 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.043 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:12.047 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.049 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.050 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:12.052 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:12.055 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.060 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_state http://127.0.0.1:2379 cce10291-266d-44f2-8ee7-60d24402305c finished null + endpoints=http://127.0.0.1:2379 + changefeed_id=cce10291-266d-44f2-8ee7-60d24402305c + expected_state=finished + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c cce10291-266d-44f2-8ee7-60d24402305c -s + info='{ "upstream_id": 7365146820937394814, "namespace": "default", "id": 
"cce10291-266d-44f2-8ee7-60d24402305c", "state": "finished", "checkpoint_tso": 449532914835128321, "checkpoint_time": "2024-05-04 22:13:01.030", "error": null }' + echo '{ "upstream_id": 7365146820937394814, "namespace": "default", "id": "cce10291-266d-44f2-8ee7-60d24402305c", "state": "finished", "checkpoint_tso": 449532914835128321, "checkpoint_time": "2024-05-04 22:13:01.030", "error": null }' { "upstream_id": 7365146820937394814, "namespace": "default", "id": "cce10291-266d-44f2-8ee7-60d24402305c", "state": "finished", "checkpoint_tso": 449532914835128321, "checkpoint_time": "2024-05-04 22:13:01.030", "error": null } ++ echo '{' '"upstream_id":' 7365146820937394814, '"namespace":' '"default",' '"id":' '"cce10291-266d-44f2-8ee7-60d24402305c",' '"state":' '"finished",' '"checkpoint_tso":' 449532914835128321, '"checkpoint_time":' '"2024-05-04' '22:13:01.030",' '"error":' null '}' ++ jq -r .state + state=finished + [[ ! finished == \f\i\n\i\s\h\e\d ]] ++ echo '{' '"upstream_id":' 7365146820937394814, '"namespace":' '"default",' '"id":' '"cce10291-266d-44f2-8ee7-60d24402305c",' '"state":' '"finished",' '"checkpoint_tso":' 449532914835128321, '"checkpoint_time":' '"2024-05-04' '22:13:01.030",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:07 CST 2024] <<<<<< run test case changefeed_finish success! >>>>>> check diff failed 5-th time, retry later [2024/05/04 22:13:12.324 +08:00] [INFO] [main.go:178] ["1 insert success: 2300"] [2024/05/04 22:13:12.330 +08:00] [INFO] [main.go:178] ["0 insert success: 1700"] [2024/05/04 22:13:12.332 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.334 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:12.344 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.344 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.404 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.404 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.413 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:12.446 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:12.500 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.504 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.511 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.513 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.515 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.516 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.520 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.527 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.810 +08:00] [INFO] [main.go:178] ["1 insert success: 2400"] [2024/05/04 22:13:12.821 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.826 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:12.913 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:12.914 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:12.923 +08:00] [INFO] [main.go:178] ["73 
insert success: 300"] [2024/05/04 22:13:12.924 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:12.931 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:12.951 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:13.012 +08:00] [INFO] [main.go:178] ["0 insert success: 1800"] [2024/05/04 22:13:13.014 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:13.016 +08:00] [INFO] [main.go:199] ["0 delete success: 900"] [2024/05/04 22:13:13.017 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:13.027 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:13.027 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:13.032 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:13.037 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:13.047 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:13.047 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7991.out cli tso query --pd=http://127.0.0.1:2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5712.out cli tso query --pd=http://127.0.0.1:2379 pass check, checkpoint tso not forward after 10s run task successfully wait process 8799 exit for 1-th time... wait process 8799 exit for 2-th time... wait process 8799 exit for 3-th time... wait process 8799 exit for 4-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8799) - No such process wait process 8799 exit for 5-th time... process 8799 already exit [Sat May 4 22:13:06 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.95499551.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:09 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc002f8cf30 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532898172731405 MinTableBarrierTs:449532916863598599 AdminJobType:noop} span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53251f42-aade-4f5a-8e39-1b4009b34c20 {"id":"53251f42-aade-4f5a-8e39-1b4009b34c20","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831987} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e5d4 53251f42-aade-4f5a-8e39-1b4009b34c20 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449532898172731405,"min-table-barrier-ts":449532917125742599,"admin-job-type":0} /tidb/cdc/default/default/task/position/53251f42-aade-4f5a-8e39-1b4009b34c20/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc002f8cf30 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532898172731405 MinTableBarrierTs:449532916863598599 AdminJobType:noop} span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, 
resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53251f42-aade-4f5a-8e39-1b4009b34c20 {"id":"53251f42-aade-4f5a-8e39-1b4009b34c20","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831987} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e5d4 53251f42-aade-4f5a-8e39-1b4009b34c20 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table 
{"checkpoint-ts":449532898172731405,"min-table-barrier-ts":449532917125742599,"admin-job-type":0} /tidb/cdc/default/default/task/position/53251f42-aade-4f5a-8e39-1b4009b34c20/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365146867010850524 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:11:42.541645108 +0800 CST StartTs:449532894227464194 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc002f8cf30 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532894253678596} {CheckpointTs:449532898172731405 MinTableBarrierTs:449532916863598599 AdminJobType:noop} span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449532898172731405, checkpointTs: 449532898172731405, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/53251f42-aade-4f5a-8e39-1b4009b34c20 {"id":"53251f42-aade-4f5a-8e39-1b4009b34c20","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831987} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f200e5d4 53251f42-aade-4f5a-8e39-1b4009b34c20 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7365146867010850524,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26663?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:11:42.541645108+08:00","start-ts":449532894227464194,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-chec+ grep -q 'etcd info' k-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532894253678596} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449532898172731405,"min-table-barrier-ts":449532917125742599,"admin-job-type":0} /tidb/cdc/default/default/task/position/53251f42-aade-4f5a-8e39-1b4009b34c20/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365146867010850524 {"id":7365146867010850524,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x check diff failed 1-th time, retry later check diff successfully check_ts_forward ddl-only-block-related-table [2024/05/04 22:13:13.222 +08:00] [INFO] [main.go:178] ["1 insert success: 2500"] [2024/05/04 22:13:13.229 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:13.237 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:13.330 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.335 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.341 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.345 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.348 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] table 
test.finish_mark not exists for 7-th check, retry later [2024/05/04 22:13:13.417 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:13.451 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.455 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.506 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.507 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.514 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.515 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.538 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.542 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.619 +08:00] [INFO] [main.go:178] ["0 insert success: 1900"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce7c4c000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:5763, start at 2024-05-04 22:13:10.558655296 +0800 CST m=+5.283273870 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:10.565 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:10.547 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:10.547 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fce7d080014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:5852, start at 2024-05-04 22:13:10.616292541 +0800 CST m=+5.287616761 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:10.624 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:10.594 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:10.594 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:13.646 +08:00] [INFO] [main.go:178] ["1 insert success: 2600"] [2024/05/04 22:13:13.649 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:13.706 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:13.823 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:13.828 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:13.831 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:13.831 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:13.895 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcea14c000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:7559, start at 2024-05-04 22:13:12.93275401 +0800 CST m=+8.022201252 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:12.940 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:12.915 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:12.915 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:13.939 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:14.012 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:14.026 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:14.026 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:14.037 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:14.042 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:14.050 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:14.117 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:14.120 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] changefeed is working normally rts: 449532917649768450->449532917911912450 checkpoint: 449532917649768450->449532917911912450 run task successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:14 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36bfb15c-f090-4551-a3b7-383c157eb3be {"id":"36bfb15c-f090-4551-a3b7-383c157eb3be","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831991} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3615b18 36bfb15c-f090-4551-a3b7-383c157eb3be /tidb/cdc/default/default/upstream/7365147255065573413 {"id":7365147255065573413,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36bfb15c-f090-4551-a3b7-383c157eb3be {"id":"36bfb15c-f090-4551-a3b7-383c157eb3be","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831991} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3615b18 36bfb15c-f090-4551-a3b7-383c157eb3be 
/tidb/cdc/default/default/upstream/7365147255065573413 {"id":7365147255065573413,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36bfb15c-f090-4551-a3b7-383c157eb3be {"id":"36bfb15c-f090-4551-a3b7-383c157eb3be","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831991} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3615b18 36bfb15c-f090-4551-a3b7-383c157eb3be /tidb/cdc/default/default/upstream/7365147255065573413 {"id":7365147255065573413,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.cli.5966.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-batch-add-table-test-26700?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [2024/05/04 22:13:14.152 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:14.203 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:14.295 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.297 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.305 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.311 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.319 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:14.404 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] check diff successfully [2024/05/04 22:13:14.429 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.446 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.448 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.497 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.508 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.524 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.559 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.562 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.600 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.609 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] wait process cdc.test exit for 1-th time... Create changefeed successfully! 
ID: 57b57760-1f71-4f34-b60b-9ecb69eff7f4 Info: {"upstream_id":7365147255065573413,"namespace":"default","id":"57b57760-1f71-4f34-b60b-9ecb69eff7f4","sink_uri":"kafka://127.0.0.1:9092/ticdc-batch-add-table-test-26700?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:14.727984835+08:00","start_ts":449532918392946689,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532918392946689,"checkpoint_ts":449532918392946689,"checkpoint_time":"2024-05-04 22:13:14.602"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:14.704 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:14.708 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:14.711 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:14.717 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:14.725 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:14.802 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:14.831 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:14.850 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:14.899 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fceace40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:7122, start at 2024-05-04 22:13:13.700522288 +0800 CST m=+5.401384494 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:13.710 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:13.707 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:13.707 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449532918086762499 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532918086762499 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8030.out cli changefeed create --start-ts=449532918086762499 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd + set +x + tso='449532918127919106 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532918127919106 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [2024/05/04 22:13:14.921 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:14.933 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:14.946 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:15.022 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:15.027 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:15.027 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:15.034 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:15.126 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.131 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.135 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.141 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:15.142 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Create changefeed successfully! 
ID: zstd Info: {"upstream_id":7365147085772336007,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-05-04T22:13:15.366395686+08:00","start_ts":449532918086762499,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532918086762499,"checkpoint_ts":449532918086762499,"checkpoint_time":"2024-05-04 22:13:13.434"} PASS wait process cdc.test exit for 2-th time... ***************** properties ***************** "requestdistribution"="uniform" "readallfields"="true" "mysql.port"="4000" "insertproportion"="0" "dotransactions"="false" "workload"="core" "updateproportion"="0" "scanproportion"="0" "mysql.user"="root" "mysql.db"="changefeed_error" "operationcount"="0" "recordcount"="20" "mysql.host"="127.0.0.1" "readproportion"="0" "threadcount"="4" ********************************************** Run finished, takes 9.883398ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3325.0, Avg(us): 1816, Min(us): 1105, Max(us): 3806, 95th(us): 4000, 99th(us): 4000 [Sat May 4 22:13:15 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedNoRetryError=1*return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.57865788.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:15.208 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:15.242 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.307 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.313 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.333 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.345 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.396 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:15.442 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.499 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.503 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.503 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.556 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.559 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.599 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:15.600 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:15.605 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:15.632 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:15.659 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.730 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.733 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:15.806 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.816 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:15.831 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:15.921 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:15.923 +08:00] [INFO] [main.go:88] ["testGetDefaultValue take 15.544777392s"] table test.finish_mark not exists for 8-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:15 CST 2024] <<<<<< run test case ddl_only_block_related_table success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcea14c000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:7559, start at 2024-05-04 22:13:12.93275401 +0800 CST m=+8.022201252 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:12.940 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:12.915 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:12.915 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcea3800016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:7601, start at 2024-05-04 22:13:13.094569013 +0800 CST m=+5.466811787 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:13.105 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:13.106 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:13.106 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
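The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above appear to be the harness confirming that each upstream and downstream TiDB instance has finished bootstrapping before the test proceeds. A minimal sketch of such a probe follows; the mysql.tidb query, host/port defaults, and retry budget are assumptions for illustration, not taken from the actual test scripts.

probe_tidb_bootstrap() {
    # Poll until the bootstrap flag shows up in mysql.tidb, mirroring the
    # "Verifying Downstream TiDB is started..." step seen in the log above.
    local host=${1:-127.0.0.1} port=${2:-4000}
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -e \
            "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;" 2>/dev/null \
            | grep -q '^bootstrapped[[:space:]]*True'; then
            return 0
        fi
        # While TiDB is still starting, the client fails with
        # "ERROR 2003 (HY000): Can't connect to MySQL server", as seen above.
        sleep 1
    done
    return 1
}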
Logging trace to /tmp/tidb_cdc_test/multi_capture/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_capture/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:15.948 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:16.005 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:16.014 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:16.038 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.046 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.049 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:16.099 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.104 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.129 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:16.165 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [Sat May 4 22:13:15 CST 2024] <<<<<< START cdc server in kafka_simple_basic case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.71857187.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 22:13:16 CST 2024] <<<<<< START kafka consumer in batch_add_table case >>>>>> check diff failed 1-th time, retry later [2024/05/04 22:13:16.196 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.224 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.249 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.330 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.353 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.431 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] table batch_add_table.finish_mark not exists for 1-th check, retry later [2024/05/04 22:13:16.440 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.443 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.500 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.508 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:16.520 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.550 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.550 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:16.621 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.625 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.652 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fceace40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:7122, start at 2024-05-04 22:13:13.700522288 +0800 CST m=+5.401384494 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:13.710 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:13.707 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:13.707 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0fcead9c0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-9b5lh-d8gp8, pid:7202, start at 2024-05-04 22:13:13.743677923 +0800 CST m=+5.396320825 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:13.752 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:13.753 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:13.753 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/common_1/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/common_1/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/common_1/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:16.697 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.718 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.765 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.809 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.906 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:16.910 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:16.911 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:16.928 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:16.938 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:16.942 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] + set +x [Sat May 4 22:13:16 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> [2024/05/04 22:13:16.946 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.007 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:17.014 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.036 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.051 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.100 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.112 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.137 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 22:13:15.330 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.362 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.463 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.471 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:16.463 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:16.471 +08:00] [INFO] [sarama.go:96] ["Kafka 
producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 22:13:15.330 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.362 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.463 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:15.471 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:16.463 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 22:13:16.471 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]") table test.zstd_finish_mark not exists for 1-th check, retry later [2024/05/04 22:13:17.221 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.238 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.349 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.403 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.404 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.407 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.411 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:17.423 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:17.424 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:17.456 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:17.468 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.505 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:17.523 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.537 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.539 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:17.568 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:17.630 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] table test.finish_mark not exists for 9-th check, retry later [2024/05/04 22:13:17.905 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:17.916 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:17.921 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:17.925 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.930 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:17.930 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:17.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
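The repeated "table <db>.<mark> not exists for N-th check, retry later" lines come from the harness waiting for a marker table to appear downstream before running the data diff. A rough sketch of that wait loop is below; the helper names, port, and retry count are illustrative assumptions, not the actual test library.

check_table_exists() {
    # Succeeds once the given table is visible in the downstream database.
    local table=$1 host=${2:-127.0.0.1} port=${3:-3306}
    mysql -h "$host" -P "$port" -u root -e "SHOW CREATE TABLE $table;" >/dev/null 2>&1
}

wait_for_finish_mark() {
    local table=$1
    for i in $(seq 1 60); do
        if check_table_exists "$table"; then
            echo "table $table exists"
            return 0
        fi
        echo "table $table not exists for $i-th check, retry later"
        sleep 2
    done
    echo "table $table not exists after 60 checks"
    return 1
}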
[2024/05/04 22:13:18.000 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:18.015 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.023 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:18.034 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.049 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.053 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:18.063 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:18.110 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.118 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] check diff failed 2-th time, retry later table batch_add_table.finish_mark not exists for 2-th check, retry later [2024/05/04 22:13:18.297 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.310 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.323 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.331 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.334 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.334 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:18.395 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:18.433 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:18.435 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.452 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.453 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.8950.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2 Starting Upstream PD... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) + (( i++ )) + (( i <= 50 )) Release Version: v8.2.0-alpha-14-g1679dbca2 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 * About to connect() to 127.0.0.1 port 8300 (#0) > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... > Host: 127.0.0.1:8300 > Accept: */* > * Trying 127.0.0.1... < HTTP/1.1 200 OK Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community < Date: Sat, 04 May 2024 14:13:18 GMT Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
< Content-Length: 815 * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 < Content-Type: text/plain; charset=utf-8 < { [data not shown] > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 * Connection #0 to host 127.0.0.1 left intact + res=' > Accept: */* > < HTTP/1.1 200 OK *** owner info ***: < Date: Sat, 04 May 2024 14:13:18 GMT < Content-Length: 815 *** processors info ***: *** etcd info ***: < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact /tidb/cdc/default/__cdc_meta__/capture/6e4acf12-3e3f-4979-b796-e379ccfb87b1 + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 {"id":"6e4acf12-3e3f-4979-b796-e379ccfb87b1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37286d0 6e4acf12-3e3f-4979-b796-e379ccfb87b1 /tidb/cdc/default/default/upstream/7365147279727216666 {"id":"accacaf9-8a19-42c2-9315-90c15d215db7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} {"id":7365147279727216666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 *** processors info ***: /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6bed accacaf9-8a19-42c2-9315-90c15d215db7 *** etcd info ***: /tidb/cdc/default/default/upstream/7365147271517687926 /tidb/cdc/default/__cdc_meta__/capture/6e4acf12-3e3f-4979-b796-e379ccfb87b1 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 {"id":"6e4acf12-3e3f-4979-b796-e379ccfb87b1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37286d0 6e4acf12-3e3f-4979-b796-e379ccfb87b1 /tidb/cdc/default/default/upstream/7365147279727216666 {"id":"accacaf9-8a19-42c2-9315-90c15d215db7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 {"id":7365147279727216666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6bed + echo ' *** owner info ***: 
accacaf9-8a19-42c2-9315-90c15d215db7 *** processors info ***: /tidb/cdc/default/default/upstream/7365147271517687926 *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e4acf12-3e3f-4979-b796-e379ccfb87b1 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 {"id":"6e4acf12-3e3f-4979-b796-e379ccfb87b1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37286d0 6e4acf12-3e3f-4979-b796-e379ccfb87b1 /tidb/cdc/default/default/upstream/7365147279727216666 {"id":7365147279727216666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x {"id":"accacaf9-8a19-42c2-9315-90c15d215db7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6bed accacaf9-8a19-42c2-9315-90c15d215db7 /tidb/cdc/default/default/upstream/7365147271517687926 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7238.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-20529?protocol=simple' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/conf/changefeed.toml -c simple-basic {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5845.out cli changefeed create --start-ts=449532918127919106 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error [2024/05/04 22:13:18.502 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.522 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.534 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.559 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.652 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.712 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.728 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:18.731 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:18.735 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:18.752 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] Create changefeed successfully! 
ID: changefeed-error Info: {"upstream_id":7365147271517687926,"namespace":"default","id":"changefeed-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:18.819699279+08:00","start_ts":449532918127919106,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532918127919106,"checkpoint_ts":449532918127919106,"checkpoint_time":"2024-05-04 22:13:13.591"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
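Editor's note: the "Create changefeed successfully!" block above is the output of the cdc cli invocation traced just before it. A minimal sketch of the command shape, for reference only (the suite actually runs the coverage-instrumented cdc.test binary; the topic, changefeed id and start-ts below are placeholders, with the tso obtained earlier via "cdc cli tso query --pd=..."):

  cdc cli changefeed create \
      --pd=http://127.0.0.1:2379 \
      --start-ts=<tso> \
      --sink-uri='kafka://127.0.0.1:9092/<topic>?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' \
      -c <changefeed-id>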
[2024/05/04 22:13:18.824 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.839 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.840 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:18.856 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.862 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:18.907 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.932 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:18.932 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:18.943 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:18.969 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:19.044 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.066 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:19.106 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:19.163 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.175 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:19.197 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.239 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.264 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.264 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:19.279 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] table test.zstd_finish_mark not exists for 2-th check, retry later Create changefeed successfully! ID: simple-basic Info: {"upstream_id":7365147279727216666,"namespace":"default","id":"simple-basic","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-20529?protocol=simple","create_time":"2024-05-04T22:13:19.176379466+08:00","start_ts":449532919558176771,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":5,"send_bootstrap_in_msg_count":100,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"
v8.2.0-alpha-52-g6a342866d","resolved_ts":449532919558176771,"checkpoint_ts":449532919558176771,"checkpoint_time":"2024-05-04 22:13:19.047"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:19.303 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:19.351 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:19.370 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:19.489 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:19.526 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] table test.finish_mark not exists for 10-th check, retry later [2024/05/04 22:13:19.549 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 22:13:19.578 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa46315de_887a_4efb_9b74_e95140c7269f"] [2024/05/04 22:13:19.598 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs20e550f6_58cb_432d_922b_36a30b61b1b5"] [2024/05/04 22:13:19.600 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 22:13:19.670 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs47026486_3c5b_4759_b6b7_faf7104377f0"] [2024/05/04 22:13:19.690 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs9be24aa6_3597_4f3c_a8a1_fd7985e08dd6"] [2024/05/04 22:13:19.765 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb706faeb_bfc0_4818_a0aa_4ab5615336cb"] + set +x + tso='449532919366287361 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532919366287361 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x check diff failed 3-th time, retry later ***************** properties ***************** "readproportion"="0" "updateproportion"="0" "mysql.host"="127.0.0.1" "insertproportion"="0" "workload"="core" "mysql.db"="multi_capture_1" "readallfields"="true" "threadcount"="2" "mysql.port"="4000" "dotransactions"="false" "operationcount"="0" "requestdistribution"="uniform" "scanproportion"="0" "mysql.user"="root" "recordcount"="10" ********************************************** Run finished, takes 8.689836ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2037.7, Avg(us): 1638, Min(us): 992, Max(us): 3715, 95th(us): 4000, 99th(us): 4000 + set +x [Sat May 4 22:13:20 CST 2024] <<<<<< START kafka consumer in changefeed_error case >>>>>> check_changefeed_state http://127.0.0.1:2379 changefeed-error failed [CDC:ErrStartTsBeforeGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=failed + error_msg='[CDC:ErrStartTsBeforeGC]' + tls_dir='[CDC:ErrStartTsBeforeGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s ***************** properties ***************** "operationcount"="0" "scanproportion"="0" "workload"="core" "requestdistribution"="uniform" "mysql.port"="4000" "mysql.user"="root" "dotransactions"="false" "recordcount"="10" "mysql.db"="multi_capture_2" "readallfields"="true" "insertproportion"="0" "threadcount"="2" "readproportion"="0" "mysql.host"="127.0.0.1" "updateproportion"="0" ********************************************** Run finished, takes 9.475112ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1795.9, Avg(us): 1794, Min(us): 1105, Max(us): 3785, 95th(us): 4000, 99th(us): 4000 [2024/05/04 22:13:20.103 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs23a79954_3f94_4f96_acf4_06a0687102eb"] [2024/05/04 22:13:20.134 
+08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.134 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.197 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs54d3ed28_9f7c_48d8_848f_bc6126f757f1"] [2024/05/04 22:13:20.218 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.220 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] table batch_add_table.finish_mark exists =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:18.903812826+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at 449532918127919106" } }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:18.903812826+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at 449532918127919106" } }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:18.903812826+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at 449532918127919106" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449532918127919106, '"checkpoint_time":' '"2024-05-04' '22:13:13.591",' '"error":' '{' '"time":' '"2024-05-04T22:13:18.903812826+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at '449532918127919106"' '}' '}' + state=failed + [[ ! 
failed == \f\a\i\l\e\d ]] ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449532918127919106, '"checkpoint_time":' '"2024-05-04' '22:13:13.591",' '"error":' '{' '"time":' '"2024-05-04T22:13:18.903812826+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at '449532918127919106"' '}' '}' ++ jq -r .error.message + message='[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at 449532918127919106' + [[ ! [CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449532918127918806 is earlier than or equal to GC safepoint at 449532918127919106 =~ \[CDC:ErrStartTsBeforeGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5967.out cli changefeed resume -c changefeed-error ***************** properties ***************** "mysql.host"="127.0.0.1" "dotransactions"="false" "scanproportion"="0" "mysql.port"="4000" "requestdistribution"="uniform" "recordcount"="10" "mysql.user"="root" "updateproportion"="0" "mysql.db"="multi_capture_3" "threadcount"="2" "readproportion"="0" "insertproportion"="0" "operationcount"="0" "workload"="core" "readallfields"="true" ********************************************** Run finished, takes 10.164119ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1808.4, Avg(us): 1949, Min(us): 1167, Max(us): 4503, 95th(us): 5000, 99th(us): 5000 Verifying downstream PD is started... check diff successfully [2024/05/04 22:13:20.433 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.502 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.503 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.508 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.543 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.544 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.659 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] + set +x ***************** properties ***************** "updateproportion"="0" "dotransactions"="false" "recordcount"="10" "scanproportion"="0" "insertproportion"="0" "mysql.user"="root" "mysql.db"="multi_capture_4" "readproportion"="0" "workload"="core" "readallfields"="true" "threadcount"="2" "operationcount"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "mysql.port"="4000" ********************************************** Run finished, takes 9.876071ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1700.2, Avg(us): 1883, Min(us): 1110, Max(us): 3891, 95th(us): 4000, 99th(us): 4000 [Sat May 4 22:13:20 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.90769078.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 + (( i = 0 )) + (( i <= 50 
)) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:20.696 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.714 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:20.716 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:20.724 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:20.725 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:20.735 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:20.735 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:20.804 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb78c4ef7_3f23_4e7b_9b78_e4687eac48d8"] [2024/05/04 22:13:20.847 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:20.859 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:20.859 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:20.863 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.cli.8598.out cli tso query --pd=http://127.0.0.1:2379 [2024/05/04 22:13:20.932 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:20.933 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:21.132 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] table test.zstd_finish_mark not exists for 3-th check, retry later wait process cdc.test exit for 1-th time... [2024/05/04 22:13:21.197 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:21.204 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.217 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.219 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:21.225 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.230 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:21.235 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.262 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:21.297 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:21.326 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.330 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.337 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.342 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.370 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.370 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] wait process cdc.test exit for 2-th time... PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark not exists for 11-th check, retry later [2024/05/04 22:13:21.525 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.618 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.629 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:21.644 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:21.645 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:21.651 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:21.701 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:21.704 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:21.728 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:21.749 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:21.815 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:21.819 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:21.826 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:21.830 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:21.843 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:21.899 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:22 CST 2024] <<<<<< run test case batch_add_table success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
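Editor's note: the check_changefeed_state call traced above (changefeed_error case) reduces to querying the changefeed and comparing its state and error message with jq. A minimal sketch reconstructed from that trace; the expected values are exactly the ones the test asserts there:

  info=$(cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s)
  state=$(echo "$info" | jq -r .state)
  message=$(echo "$info" | jq -r .error.message)
  if [ "$state" != "failed" ]; then
      echo "unexpected state: $state"; exit 1
  fi
  if ! echo "$message" | grep -q 'CDC:ErrStartTsBeforeGC'; then
      echo "unexpected error message: $message"; exit 1
  fi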
[2024/05/04 22:13:22.109 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:22.224 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:22.242 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.306 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:22.310 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.323 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.325 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:22.329 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.353 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:22.396 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:22.429 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.431 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.432 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.433 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.439 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.443 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] check diff failed 4-th time, retry later [2024/05/04 22:13:22.611 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] + set +x table changefeed_error.usertable not exists for 1-th check, retry later + set +x + tso='449532920072241153 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532920072241153 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:13:22 CST 2024] <<<<<< START cdc server in common_1 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.86358637.out server --log-file /tmp/tidb_cdc_test/common_1/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/common_1/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:22.733 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.809 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:22.821 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:22.830 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:22.844 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:22.847 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:22.847 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:22.910 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:22.911 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:22.944 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:22.946 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:22.947 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:22.949 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:22.953 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:22.955 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] table test.zstd_finish_mark exists [2024/05/04 22:13:23.031 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:23.208 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8161.out cli changefeed pause -c zstd Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:23.305 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.313 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:23.335 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:23.406 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.411 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:23.413 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 22:13:23.500 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:23.502 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:23.545 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.547 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:23.548 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.548 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.549 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:23.553 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:23.632 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.708 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] start tidb cluster in /tmp/tidb_cdc_test/force_replicate_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:23.746 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:23.804 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:23.895 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:23.909 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:23.915 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:23.915 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:23 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:13:23 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.91339135.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 ++ curl 
-vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:24.007 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:24.016 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:24.101 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.102 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:24.105 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.105 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:24.106 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.125 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:24.151 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.204 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:24.234 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.243 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.314 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.329 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.332 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.333 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:24.417 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:24.420 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] check diff failed 5-th time, retry later [2024/05/04 22:13:24.526 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.529 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.531 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.532 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.534 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.551 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.606 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:24.632 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.652 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:24.707 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] table changefeed_error.usertable exists check diff failed 1-th time, retry later table test.finish_mark not exists for 12-th check, retry later [2024/05/04 22:13:24.803 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:24.813 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:24.821 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:24.826 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:24.926 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:24.926 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:25.122 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:25.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:25.125 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] 
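Editor's note: the multi_capture case above starts several cdc servers (captures) on consecutive ports, each with its own log file and data dir, then polls each /debug/info endpoint as already shown. A minimal sketch of that startup loop using the flags visible in the trace (the -test.coverprofile flag passed to cdc.test is omitted for brevity):

  for n in 1 2 3; do
      cdc.test server \
          --addr "127.0.0.1:$((8300 + n))" \
          --log-file "/tmp/tidb_cdc_test/multi_capture/cdc${n}.log" \
          --log-level debug \
          --data-dir "/tmp/tidb_cdc_test/multi_capture/cdc_data${n}" \
          --cluster-id default &
  done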
[2024/05/04 22:13:25.127 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:25.128 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:25.198 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:25.218 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8194.out cli changefeed remove -c zstd [2024/05/04 22:13:25.308 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.314 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:25.321 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:25.496 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.504 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.515 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.522 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:25.619 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:25.620 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] Changefeed remove successfully. ID: zstd CheckpointTs: 449532919004266528 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [Sat May 4 22:13:25 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:25.812 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.820 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.822 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.823 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.825 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.826 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.904 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:25.918 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:25.919 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:25.927 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] table test.finish_mark not exists for 13-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:25 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 {"id":"a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832002} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37e7adc a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 /tidb/cdc/default/default/upstream/7365147296402358603 {"id":7365147296402358603,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 {"id":"a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832002} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37e7adc a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 /tidb/cdc/default/default/upstream/7365147296402358603 {"id":7365147296402358603,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 {"id":"a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832002} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f37e7adc a8b5e91e-6b2d-4945-a4c2-93bfe2a503d7 /tidb/cdc/default/default/upstream/7365147296402358603 {"id":7365147296402358603,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 0a158644-b2c4-4fde-b855-f39d82c3f8c5 Info: {"upstream_id":7365147296402358603,"namespace":"default","id":"0a158644-b2c4-4fde-b855-f39d82c3f8c5","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-20185?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:25.799132134+08:00","start_ts":449532920072241153,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532920072241153,"checkpoint_ts":449532920072241153,"checkpoint_time":"2024-05-04 22:13:21.008"} [Sat May 4 22:13:25 CST 2024] <<<<<< START kafka consumer in common_1 case >>>>>> [2024/05/04 22:13:26.030 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.111 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.113 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.125 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] check diff successfully [2024/05/04 22:13:26.307 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:26.315 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:26.533 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.535 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.596 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.606 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.610 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.612 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.631 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.634 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.643 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:26.653 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:26.712 +08:00] [INFO] 
[main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:26.728 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:26.728 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:26.796 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:26.917 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:26.927 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark_for_ddl not exists for 1-th check, retry later check diff successfully ***************** properties ***************** "scanproportion"="0" "operationcount"="0" "updateproportion"="0" "readproportion"="0" "mysql.db"="changefeed_error" "insertproportion"="0" "mysql.port"="4000" "readallfields"="true" "recordcount"="20" "threadcount"="4" "mysql.host"="127.0.0.1" "mysql.user"="root" "workload"="core" "requestdistribution"="uniform" "dotransactions"="false" ********************************************** Run finished, takes 5.174848ms INSERT - Takes(s): 0.0, Count: 20, OPS: 5042.2, Avg(us): 853, Min(us): 464, Max(us): 1796, 95th(us): 2000, 99th(us): 2000 check diff successfully {"id":"accacaf9-8a19-42c2-9315-90c15d215db7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714831995} check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + [[ /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + echo 'check failed' check failed + exit 1 run task failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:26 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c {"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c {"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c 
{"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Sat May 4 22:13:26 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.91889190.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8303; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:27.313 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:27.343 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:27.346 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.348 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:27.399 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.402 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.418 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.419 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:27.423 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.453 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:27.490 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:27.490 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] + set +x wait process cdc.test exit for 1-th time... 
[2024/05/04 22:13:27.724 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:27.809 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:27.857 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:27.861 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:27.897 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:27.901 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:27.907 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:27.933 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:27.933 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:27.937 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_auto_stop/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff failed 1-th time, retry later wait process cdc.test exit for 2-th time... [2024/05/04 22:13:28.097 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:28.300 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:28.352 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:28.425 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:28.429 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:28.430 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:28.449 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:28.452 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:28.455 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:28.522 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs9d5015fa_206f_4f74_a8d4_a8eba1a0772f"] table common_1.v1 not exists for 1-th check, retry later table test.finish_mark not exists for 14-th check, retry later [2024/05/04 22:13:28.828 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:28.839 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa123711f_44e1_4aa5_afbe_175d1ef8ca14"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:28 CST 2024] <<<<<< run test case kafka_compression success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
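"Verifying Upstream TiDB is started..." is followed below by repeated ERROR 2003 lines and then a dump of the GC variables kept in the mysql.tidb table, which suggests the harness simply retries a query against that table until the server accepts connections. A hypothetical equivalent of that probe (host and port taken from the log, the query target and loop shape are assumptions):

    # Assumed readiness probe: retry until TiDB answers a query on mysql.tidb.
    while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;' 2>/dev/null; do
        sleep 1    # ERROR 2003 means the port is not accepting connections yet
    done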
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:28.851 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:28.905 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:28.906 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:28.961 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:28.975 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:28.980 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs8113a54c_b9da_43fd_9b0d_e2006ec6be69"] table test.finish_mark_for_ddl not exists for 2-th check, retry later check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + [[ /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + echo 'check failed' check failed + exit 1 run task failed 2-th time, retry later [2024/05/04 22:13:29.226 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs3ccb8245_3c66_45e4_9315_9984a8087c10"] [2024/05/04 22:13:29.302 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:29.305 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:29.312 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 22:13:29.346 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 22:13:29.401 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:29.403 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:29.410 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:29.462 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:29.499 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs28548b2e_e98a_4e54_a775_fc2f06be1243"] [2024/05/04 22:13:29.542 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs0cf3419c_3e7c_4f3c_b957_d9731c0f0e18"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcf96ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:8760, start at 2024-05-04 22:13:28.653009116 +0800 CST m=+5.309750875 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:28.663 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:28.669 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:28.669 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:29.616 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:29.628 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8303 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:29 GMT < Content-Length: 1750 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c {"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/capture/9c33c48a-d600-4807-80ed-9e14476ae4e9 {"id":"9c33c48a-d600-4807-80ed-9e14476ae4e9","address":"127.0.0.1:8303","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832007} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e396 9c33c48a-d600-4807-80ed-9e14476ae4e9 /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c 
{"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/capture/9c33c48a-d600-4807-80ed-9e14476ae4e9 {"id":"9c33c48a-d600-4807-80ed-9e14476ae4e9","address":"127.0.0.1:8303","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832007} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e396 9c33c48a-d600-4807-80ed-9e14476ae4e9 /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/577690d7-6ea1-456f-95c0-6d12e41faf2d {"id":"577690d7-6ea1-456f-95c0-6d12e41faf2d","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832001} /tidb/cdc/default/__cdc_meta__/capture/90e77db4-6860-436a-8620-8a919099ee0c {"id":"90e77db4-6860-436a-8620-8a919099ee0c","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832004} /tidb/cdc/default/__cdc_meta__/capture/9c33c48a-d600-4807-80ed-9e14476ae4e9 {"id":"9c33c48a-d600-4807-80ed-9e14476ae4e9","address":"127.0.0.1:8303","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832007} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e344 577690d7-6ea1-456f-95c0-6d12e41faf2d /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e37b 90e77db4-6860-436a-8620-8a919099ee0c /tidb/cdc/default/__cdc_meta__/owner/22318f43f376e396 9c33c48a-d600-4807-80ed-9e14476ae4e9 /tidb/cdc/default/default/upstream/7365147275495807020 {"id":7365147275495807020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9246.out cli changefeed create --start-ts=449532919366287361 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-capture-test-16873?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8301 [2024/05/04 22:13:29.905 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:29.911 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:29.912 +08:00] [INFO] 
[main.go:178] ["73 insert success: 1700"] [2024/05/04 22:13:29.997 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 22:13:30.020 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:30.025 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:30.027 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:30.057 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:30.098 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:30.116 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:30.119 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:30.127 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:30.139 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:30.140 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] Create changefeed successfully! ID: 10029e15-f97f-4cb3-b32e-52b318656c16 Info: {"upstream_id":7365147275495807020,"namespace":"default","id":"10029e15-f97f-4cb3-b32e-52b318656c16","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-capture-test-16873?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:30.370195359+08:00","start_ts":449532919366287361,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532919366287361,"checkpoint_ts":449532919366287361,"checkpoint_time":"2024-05-04 22:13:18.315"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:13:30.428 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:30.430 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] table common_1.v1 not exists for 2-th check, retry later table test.finish_mark not exists for 15-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/changefeed_auto_stop Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... check diff failed 2-th time, retry later [2024/05/04 22:13:30.640 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:30.643 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:30.643 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:30.701 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:30.707 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:30.722 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:30.727 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:30.731 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:30.731 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:30.736 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:30.737 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs85ca314e_102e_4480_8a67_f2d4cea22b5b"] [2024/05/04 22:13:30.839 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:30.839 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [2024/05/04 22:13:31.224 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:31.227 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:31.296 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:31.339 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:31.402 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:31.420 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:31.426 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:31.428 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:31.430 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:31.435 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:31.435 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:31.438 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcf96ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:8760, start at 2024-05-04 22:13:28.653009116 +0800 CST m=+5.309750875 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:28.663 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:28.669 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:28.669 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcf9768000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-n5hp4-vjsxk, pid:8829, start at 2024-05-04 22:13:28.683319196 +0800 CST m=+5.282095571 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:28.690 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:28.666 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:28.666 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
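The recurring check_etcd_meta_not_exist failures in this log ("run task failed 1-th/2-th time" above, "3-th time" below) follow a simple pattern visible in the set -x trace: list the keys under a prefix with etcdctl and fail while the given marker still matches. A sketch reconstructed from that trace, not the exact helper in the tiflow repository:

    check_etcd_meta_not_exist() {
        key_prefix=$1    # e.g. /tidb/cdc/default/__cdc_meta__/capture
        message=$2       # e.g. capture
        info=$(etcdctl get "$key_prefix" --prefix --keys-only)
        if [[ $info =~ $message ]]; then
            echo "$message contains in etcd $info"
            echo 'check failed'
            exit 1
        fi
        # success path simply returns 0 (its exact output is not shown in the trace)
    }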
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark_for_ddl not exists for 3-th check, retry later [2024/05/04 22:13:31.525 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:31.526 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:31.641 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb1c3cbac_c263_4fc6_b5d1_a233a484c61d"] Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:31.707 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:31.722 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:31.725 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:31.832 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:31.918 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:31.998 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:32.007 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:32.011 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:32.015 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:32.017 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:32.020 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:32.028 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:32.146 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:32.152 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] + set +x [Sat May 4 22:13:31 CST 2024] <<<<<< START kafka consumer in multi_capture case >>>>>> table multi_capture_1.usertable not exists for 1-th check, retry later [2024/05/04 22:13:32.235 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:32.245 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:32.311 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:32.315 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:32.322 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] table common_1.v1 exists table common_1.recover_and_insert not exists for 1-th check, retry later table test.finish_mark not exists for 16-th 
check, retry later check diff failed 3-th time, retry later [2024/05/04 22:13:32.514 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:32.610 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:32.703 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:32.708 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:32.708 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:32.712 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:32.714 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:32.718 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:32.725 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:32.820 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:32.831 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:32.897 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:32.899 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:32.909 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:32.924 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:32.928 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:33.109 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:33.218 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + [[ /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/accacaf9-8a19-42c2-9315-90c15d215db7 + echo 'check failed' check failed + exit 1 run task failed 3-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:33.407 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:33.408 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:33.413 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:33.419 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:33.421 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:33.425 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:33.432 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:33.506 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] table test.finish_mark_for_ddl not exists for 4-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10277.out cli tso query --pd=http://127.0.0.1:2379 [2024/05/04 22:13:33.519 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:33.535 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:33.539 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:33.542 +08:00] [INFO] [main.go:178] ["73 
insert success: 300"] [2024/05/04 22:13:33.561 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:33.562 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:33.614 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:33.639 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:34.013 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:34.020 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcfc8b4000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:7079, start at 2024-05-04 22:13:31.83641062 +0800 CST m=+6.327469790 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:31.842 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:31.821 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:31.821 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcfc8b4000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:7079, start at 2024-05-04 22:13:31.83641062 +0800 CST m=+6.327469790 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:31.842 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:31.821 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:31.821 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcfb9040013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:7162, start at 2024-05-04 22:13:30.841442658 +0800 CST m=+5.280447181 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:30.848 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:30.817 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:30.817 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table multi_capture_1.usertable not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_rocks/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 22:13:34.035 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:34.037 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:34.103 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:34.106 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:34.116 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:34.157 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:34.213 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:34.228 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:34.230 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:34.236 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:34.245 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:34.246 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] table common_1.recover_and_insert not exists for 2-th check, retry later table test.finish_mark exists check diff successfully check diff failed 4-th time, retry later [2024/05/04 22:13:34.319 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:34.329 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:34.629 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:34.708 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] wait process cdc.test exit for 1-th time... [2024/05/04 22:13:34.817 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:34.818 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:34.835 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:34.837 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:34.911 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:34.926 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:35.011 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:35.030 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcfeaa80013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:4304, start at 2024-05-04 22:13:34.017734308 +0800 CST m=+5.230307031 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:34.024 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:33.994 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:33.994 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:35.097 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:35.105 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:35.111 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:35.111 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:35.123 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:35.138 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] table test.finish_mark_for_ddl not exists for 5-th check, retry later [2024/05/04 22:13:35.495 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:35.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] wait process cdc.test exit for 2-th time... + set +x + tso='449532923469889537 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532923469889537 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:13:35 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1031410316.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:35.704 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:35.706 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:35.721 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:35.726 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:35.798 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:35.804 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:35.829 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:35.898 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:35.912 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:35.923 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:35.924 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:35.933 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:35.935 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:35.948 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [Sat May 4 22:13:35 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.85088510.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:35 CST 2024] <<<<<< run test case many_pk_or_uk success! >>>>>> table multi_capture_1.usertable exists table multi_capture_2.usertable exists table multi_capture_3.usertable not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
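The server launched above for the processor_stop_delay case is started with a failpoint injected through the GO_FAILPOINTS environment variable; per the pingcap/failpoint term syntax, 1*sleep(10000) should fire once and sleep for 10000 ms in the processor stop path. Isolated from the test harness, the launch is roughly (all flags copied from the trace):

    # Inject a one-shot 10 s delay into the processor stop path, then start the server.
    export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)'
    cdc server \
        --addr 127.0.0.1:8300 \
        --pd http://127.0.0.1:2379 \
        --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data \
        --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log \
        --log-level debug \
        --cluster-id default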
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:36.123 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:36.304 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] table common_1.recover_and_insert not exists for 3-th check, retry later [2024/05/04 22:13:36.404 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:36.411 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:36.430 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:36.431 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:36.526 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:36.531 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:36.608 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:36.711 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:36.723 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:36.723 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:36.726 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:36.797 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:36.800 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:36.806 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] check diff failed 5-th time, retry later [2024/05/04 22:13:36.902 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:37.007 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:37.023 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:37.029 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcfeaa80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:4304, start at 2024-05-04 22:13:34.017734308 +0800 CST m=+5.230307031 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:34.024 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:33.994 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:33.994 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fcff2640008 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6j6mj-4nbr6, pid:4382, start at 2024-05-04 22:13:34.498841951 +0800 CST m=+5.656725134 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:34.505 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:34.489 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:34.489 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark_for_ddl exists + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7366.out cli changefeed pause -c simple-basic [2024/05/04 22:13:37.102 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:37.106 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:37.202 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:37.209 +08:00] [INFO] [main.go:178] ["72 insert success: 
1300"] [2024/05/04 22:13:37.218 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] start tidb cluster in /tmp/tidb_cdc_test/multi_rocks Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [2024/05/04 22:13:37.408 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:37.416 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:37.514 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:37.517 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:37.524 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:37.628 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:37.731 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:37.731 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:37.741 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:37.752 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:37.753 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:37.834 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table multi_capture_3.usertable exists table multi_capture_4.usertable not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:37.908 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:37.929 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:38.029 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:38.032 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:38.112 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:38.308 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:38.416 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:38.432 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:38.434 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 22:13:38.449 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:38.449 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:38.533 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:38.535 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:38.550 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:38.611 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] table common_1.recover_and_insert exists table common_1.finish_mark not exists for 1-th check, retry later [2024/05/04 22:13:38.733 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/be3f2f2d-7f4a-4567-a483-a53ed827b9b4 {"id":"be3f2f2d-7f4a-4567-a483-a53ed827b9b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3bef9d8 be3f2f2d-7f4a-4567-a483-a53ed827b9b4 /tidb/cdc/default/default/upstream/7365147357787525640 {"id":7365147357787525640,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/be3f2f2d-7f4a-4567-a483-a53ed827b9b4 {"id":"be3f2f2d-7f4a-4567-a483-a53ed827b9b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3bef9d8 be3f2f2d-7f4a-4567-a483-a53ed827b9b4 /tidb/cdc/default/default/upstream/7365147357787525640 {"id":7365147357787525640,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/be3f2f2d-7f4a-4567-a483-a53ed827b9b4 {"id":"be3f2f2d-7f4a-4567-a483-a53ed827b9b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3bef9d8 be3f2f2d-7f4a-4567-a483-a53ed827b9b4 /tidb/cdc/default/default/upstream/7365147357787525640 {"id":7365147357787525640,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10375.out cli changefeed create --start-ts=449532923469889537 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml check diff successfully [2024/05/04 22:13:38.907 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 22:13:38.914 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:38.995 +08:00] [INFO] 
[main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:39.100 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 22:13:39.118 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7398.out cli changefeed resume -c simple-basic + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c77398d8-a930-4eea-a5e7-a8bb509aed4c {"id":"c77398d8-a930-4eea-a5e7-a8bb509aed4c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832016} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3c988cc c77398d8-a930-4eea-a5e7-a8bb509aed4c /tidb/cdc/default/default/upstream/7365147370499757104 {"id":7365147370499757104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c77398d8-a930-4eea-a5e7-a8bb509aed4c {"id":"c77398d8-a930-4eea-a5e7-a8bb509aed4c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832016} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3c988cc c77398d8-a930-4eea-a5e7-a8bb509aed4c /tidb/cdc/default/default/upstream/7365147370499757104 {"id":7365147370499757104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c77398d8-a930-4eea-a5e7-a8bb509aed4c {"id":"c77398d8-a930-4eea-a5e7-a8bb509aed4c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832016} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3c988cc c77398d8-a930-4eea-a5e7-a8bb509aed4c /tidb/cdc/default/default/upstream/7365147370499757104 {"id":7365147370499757104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:13:39 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>> [2024/05/04 
22:13:39.190 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 22:13:39.266 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 22:13:39.272 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 22:13:39.301 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 22:13:39.322 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs4e66aaab_2f74_4589_8e3a_ed381ae107ef"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used table processor_stop_delay.t not exists for 1-th check, retry later [Sat May 4 22:13:39 CST 2024] <<<<<< START cdc server in force_replicate_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.force_replicate_table.58915893.out server --log-file /tmp/tidb_cdc_test/force_replicate_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/force_replicate_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:39.427 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 22:13:39.454 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: f2a1c868-1dca-4117-b728-c6297cc22593 Info: {"upstream_id":7365147357787525640,"namespace":"default","id":"f2a1c868-1dca-4117-b728-c6297cc22593","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-04T22:13:39.540406343+08:00","start_ts":449532923469889537,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532923469889537,"checkpoint_ts":449532923469889537,"checkpoint_time":"2024-05-04 22:13:33.969"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... 
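The Info JSON just above shows the multi_topics changefeed routing every table matching test.* to its own topic ("dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}]). As an illustrative sketch only (the repo's conf/changefeed.toml is not reproduced in this log, and the path below is hypothetical), that routing is expressed in a changefeed config roughly like this:

    # Sketch: write a [sink] dispatcher rule that sends test.* tables to
    # per-table topics named <schema>_<table>, then create the changefeed
    # with the same Kafka sink URI as the run above.
    cat > /tmp/changefeed.toml <<'EOF'
    [sink]
    dispatchers = [
        { matcher = ["test.*"], topic = "{schema}_{table}" },
    ]
    EOF
    cdc cli changefeed create \
        --sink-uri='kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' \
        --config=/tmp/changefeed.toml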
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info= + [[ '' =~ capture ]] + echo 'check pass' check pass + exit 0 run task successfully check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 22:13:39 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.62346236.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:13:39.678 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:39.689 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:39.766 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs478eeb66_6c9d_4d39_a1e5_7ae2113af1ef"] [2024/05/04 22:13:39.774 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs2705bf3e_b915_493b_8b4d_d940e8d1186c"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:39.907 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs4538413d_cc85_433b_9cd6_e0b8a77f0b45"] [2024/05/04 22:13:39.989 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:39.999 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:40.076 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:40.080 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:40.088 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:40.111 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs2c7fa250_5eb5_4d8f_95d2_38718c7055d1"] [2024/05/04 22:13:40.125 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:40.252 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:40.373 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] table multi_capture_4.usertable exists check diff failed 1-th time, retry later [2024/05/04 22:13:40.435 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:40.435 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:40.454 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 22:13:40.552 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] table common_1.finish_mark not exists for 2-th check, retry later check diff failed 
1-th time, retry later [2024/05/04 22:13:40.714 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:40.780 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:40.780 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 22:13:40.795 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 22:13:40.882 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:13:40 CST 2024] <<<<<< run test case kafka_messages success! >>>>>> + set +x [2024/05/04 22:13:40.932 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:41.020 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:41.028 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] + set +x Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... [2024/05/04 22:13:41.212 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:41.218 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:41.225 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 22:13:41.225 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:41.226 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] table processor_stop_delay.t not exists for 2-th check, retry later Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
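The recurring "+ (( i++ )) ... curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info ... grep -q 'etcd info'" traces throughout this log are the harness waiting for a freshly started cdc server to become healthy. A condensed sketch of that loop, reconstructed from the set -x output rather than copied from the test library:

    # Poll the CDC status endpoint until it reports etcd info; give up after
    # 50 attempts with a 3-second pause between tries, mirroring the trace.
    get_info_fail_msg='failed to get info:'
    etcd_info_msg='etcd info'
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        if echo "$res" | grep -q "$get_info_fail_msg"; then
            echo "failed to get info, retry later"
        elif echo "$res" | grep -q "$etcd_info_msg"; then
            break
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server did not come up in time"
            exit 1
        fi
        sleep 3
    done

While the server is still starting, curl returns "Connection refused", res stays empty, and the loop simply sleeps and retries, which is exactly the pattern visible in the traces above.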
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:13:41.430 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:41.431 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:41.466 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:41.535 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:41.536 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:41.625 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:41.633 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 22:13:41.635 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:41.635 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:41.644 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 22:13:41.829 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:41.834 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:41.937 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:41.971 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:41.986 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:42.053 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:42.060 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:42.061 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 22:13:42.061 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:42.067 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 22:13:42.151 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:42.155 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:42 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b880b2c3-47df-4b40-98d7-56d96ac45f85 {"id":"b880b2c3-47df-4b40-98d7-56d96ac45f85","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6cdd b880b2c3-47df-4b40-98d7-56d96ac45f85 /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7365147271517687926,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:13:18.819699279+08:00","start-ts":449532918127919106,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T22:13:39.635613203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532924943663110} /tidb/cdc/default/default/changefeed/status/changefeed-error 
{"checkpoint-ts":449532921378504709,"min-table-barrier-ts":449532921378504709,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b880b2c3-47df-4b40-98d7-56d96ac45f85 {"id":"b880b2c3-47df-4b40-98d7-56d96ac45f85","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6cdd b880b2c3-47df-4b40-98d7-56d96ac45f85 /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7365147271517687926,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:13:18.819699279+08:00","start-ts":449532918127919106,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T22:13:39.635613203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532924943663110} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449532921378504709,"min-table-barrier-ts":449532921378504709,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365147271517687926 
{"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b880b2c3-47df-4b40-98d7-56d96ac45f85 {"id":"b880b2c3-47df-4b40-98d7-56d96ac45f85","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6cdd b880b2c3-47df-4b40-98d7-56d96ac45f85 /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7365147271517687926,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:13:18.819699279+08:00","start-ts":449532918127919106,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T22:13:39.635613203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532924943663110} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449532921378504709,"min-table-barrier-ts":449532921378504709,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365147271517687926 
{"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error warning failpoint injected retriable error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=warning + error_msg=failpoint + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s check diff successfully ***************** properties ***************** "mysql.user"="root" "dotransactions"="false" "readproportion"="0" "scanproportion"="0" "threadcount"="2" "mysql.host"="127.0.0.1" "operationcount"="0" "mysql.port"="4000" "workload"="core" "recordcount"="20" "insertproportion"="0" "requestdistribution"="uniform" "readallfields"="true" "mysql.db"="multi_capture_1" "updateproportion"="0" ********************************************** Run finished, takes 11.316606ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2033.0, Avg(us): 1075, Min(us): 606, Max(us): 1834, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "operationcount"="0" "requestdistribution"="uniform" "readproportion"="0" "mysql.port"="4000" "workload"="core" "insertproportion"="0" "mysql.user"="root" "scanproportion"="0" "mysql.host"="127.0.0.1" "threadcount"="2" "mysql.db"="multi_capture_2" "dotransactions"="false" "updateproportion"="0" "recordcount"="20" "readallfields"="true" ********************************************** Run finished, takes 29.35583ms INSERT - Takes(s): 0.0, Count: 20, OPS: 989.5, Avg(us): 2867, Min(us): 889, Max(us): 9089, 95th(us): 10000, 99th(us): 10000 ***************** properties ***************** "mysql.host"="127.0.0.1" "readproportion"="0" "scanproportion"="0" "recordcount"="20" "requestdistribution"="uniform" "mysql.db"="multi_capture_3" "dotransactions"="false" "readallfields"="true" "operationcount"="0" "threadcount"="2" "insertproportion"="0" "mysql.user"="root" "updateproportion"="0" "workload"="core" "mysql.port"="4000" ********************************************** Run finished, takes 11.100165ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2080.3, Avg(us): 1059, Min(us): 491, Max(us): 1876, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "mysql.host"="127.0.0.1" "insertproportion"="0" "mysql.db"="multi_capture_4" "scanproportion"="0" "workload"="core" "mysql.port"="4000" "readallfields"="true" "operationcount"="0" "threadcount"="2" "requestdistribution"="uniform" "readproportion"="0" "mysql.user"="root" "recordcount"="20" "updateproportion"="0" "dotransactions"="false" ********************************************** Run finished, takes 12.021801ms INSERT - Takes(s): 0.0, Count: 20, OPS: 1866.6, Avg(us): 1043, Min(us): 500, Max(us): 1855, 95th(us): 2000, 99th(us): 2000 [2024/05/04 22:13:42.243 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd05ad40011 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:10841, start at 2024-05-04 22:13:41.191543124 +0800 CST m=+5.325838398 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:41.199 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:41.173 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:41.173 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449532921378504709, "checkpoint_time": "2024-05-04 22:13:25.991", "error": { "time": "2024-05-04T22:13:39.635613203+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449532921378504709, "checkpoint_time": "2024-05-04 22:13:25.991", "error": { "time": "2024-05-04T22:13:39.635613203+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449532921378504709, "checkpoint_time": "2024-05-04 22:13:25.991", "error": { "time": "2024-05-04T22:13:39.635613203+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449532921378504709, '"checkpoint_time":' '"2024-05-04' '22:13:25.991",' '"error":' '{' '"time":' '"2024-05-04T22:13:39.635613203+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449532921378504709, '"checkpoint_time":' '"2024-05-04' '22:13:25.991",' '"error":' '{' '"time":' '"2024-05-04T22:13:39.635613203+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .error.message + message='failpoint injected retriable error' + [[ ! 
failpoint injected retriable error =~ failpoint ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6344.out cli changefeed remove -c changefeed-error table common_1.finish_mark not exists for 3-th check, retry later check diff failed 1-th time, retry later [2024/05/04 22:13:42.454 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:42.459 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:42.520 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:42.522 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 22:13:42.524 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:42.527 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:42.529 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 22:13:42.593 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:42.602 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:42.622 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:31.597 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/05/04 22:13:31.597 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365146860202399852] [2024/05/04 22:13:31.598 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 22:13:31.598 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/05/04 22:13:31.598 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/04 22:13:31.598 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 22:13:31.599 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/05/04 22:13:31.599 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365146860202399852] [2024/05/04 22:13:31.600 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 22:13:31.600 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/05/04 22:13:31.600 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/04 22:13:31.600 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 22:13:31.601 +08:00] [INFO] [tikv_driver.go:197] ["using API V1."] [2024/05/04 22:13:31.601 +08:00] [INFO] [main.go:180] ["genLock started"] [2024/05/04 22:13:31.602 +08:00] [INFO] [store_cache.go:477] ["change store resolve state"] [store=7] [addr=127.0.0.1:20161] [from=unresolved] [to=resolved] [liveness-state=reachable] [2024/05/04 22:13:41.605 +08:00] [INFO] [main.go:196] ["genLock done"] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_dispatcher.go:268] ["exit tso dispatcher loop"] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_dispatcher.go:214] ["exit tso requests cancel loop"] [2024/05/04 22:13:41.605 +08:00] [INFO] [pd_service_discovery.go:550] ["[pd] exit member loop due to context canceled"] [2024/05/04 22:13:41.605 +08:00] [INFO] 
[tso_dispatcher.go:455] ["[tso] stop fetching the pending tso requests due to context canceled"] [dc-location=global] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_dispatcher.go:380] ["[tso] exit tso dispatcher"] [dc-location=global] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/05/04 22:13:41.605 +08:00] [INFO] [resource_manager_client.go:295] ["[resource manager] exit resource token dispatcher"] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_client.go:140] ["closing tso client"] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_client.go:145] ["close tso client"] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/05/04 22:13:41.605 +08:00] [INFO] [tso_client.go:155] ["tso client is closed"] [2024/05/04 22:13:41.605 +08:00] [INFO] [pd_service_discovery.go:637] ["[pd] close pd service discovery client"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d20d758-0a0d-47fa-beb4-d8ba55554d15 {"id":"7d20d758-0a0d-47fa-beb4-d8ba55554d15","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3d26acb 7d20d758-0a0d-47fa-beb4-d8ba55554d15 /tidb/cdc/default/default/upstream/7365147383105321431 {"id":7365147383105321431,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d20d758-0a0d-47fa-beb4-d8ba55554d15 {"id":"7d20d758-0a0d-47fa-beb4-d8ba55554d15","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3d26acb 7d20d758-0a0d-47fa-beb4-d8ba55554d15 /tidb/cdc/default/default/upstream/7365147383105321431 {"id":7365147383105321431,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/7d20d758-0a0d-47fa-beb4-d8ba55554d15 
{"id":"7d20d758-0a0d-47fa-beb4-d8ba55554d15","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832019} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3d26acb 7d20d758-0a0d-47fa-beb4-d8ba55554d15 /tidb/cdc/default/default/upstream/7365147383105321431 {"id":7365147383105321431,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: 7fc177af-05c3-4963-b248-2caeec61df5c Info: {"upstream_id":7365147383105321431,"namespace":"default","id":"7fc177af-05c3-4963-b248-2caeec61df5c","sink_uri":"kafka://127.0.0.1:9092/ticdc-force_replicate_table-test-8907?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:42.536914707+08:00","start_ts":449532924839854081,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":true,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532924839854081,"checkpoint_ts":449532924839854081,"checkpoint_time":"2024-05-04 22:13:39.195"} [Sat May 4 22:13:42 CST 2024] <<<<<< START kafka consumer in force_replicate_table case >>>>>> consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/conf/changefeed.toml [2024/05/04 22:13:42.814 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:42.833 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:42.936 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 22:13:42.940 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:42.953 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 22:13:43.017 +08:00] 
[INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:43.030 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:43.041 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:43.139 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:43.155 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] check diff failed 2-th time, retry later table processor_stop_delay.t exists [2024/05/04 22:13:43.333 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 22:13:43.356 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 22:13:43.429 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:43.432 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Changefeed remove successfully. ID: changefeed-error CheckpointTs: 449532921378504709 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff successfully [2024/05/04 22:13:43.487 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 22:13:43.491 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:43.632 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 22:13:43.660 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 22:13:43.747 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 22:13:43.748 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 22:13:43.799 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:43.805 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 22:13:44.036 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 22:13:44.036 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] table force_replicate_table.t0 not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd05ad40011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:10841, start at 2024-05-04 22:13:41.191543124 +0800 CST m=+5.325838398 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:41.199 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:41.173 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:41.173 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd05c380015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-qf5bw-s1gf0, pid:10922, start at 2024-05-04 22:13:41.298860695 +0800 CST m=+5.384049470 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:41.308 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:41.312 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:41.312 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
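The VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows dumped above (bootstrapped, tidb_server_version, tikv_gc_leader_desc, tikv_gc_life_time, tikv_gc_safe_point, ...) are the bookkeeping rows TiDB keeps in its mysql.tidb table; the harness prints them while waiting for the upstream and downstream clusters. A way to inspect the same GC values by hand, shown here only as an illustration and not as part of the test scripts (port 4000 is the upstream TiDB port used by these cases):

    # Read the GC-related rows from TiDB's internal mysql.tidb table.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
        'SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE "tikv_gc%"'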
Logging trace to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:13:44.327 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 22:13:44.333 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] table common_1.finish_mark not exists for 4-th check, retry later [2024/05/04 22:13:44.596 +08:00] [INFO] [main.go:812] ["testMultiDDLs take %v44.2173531s"] [2024/05/04 22:13:44.677 +08:00] [INFO] [main.go:74] ["DefaultValue integration tests take 44.298117485s"] + set +x check_no_changefeed 127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully check diff successfully wait process cdc.test exit for 1-th time... check diff failed 3-th time, retry later table mark.finish_mark_1 exists table mark.finish_mark_2 not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... check diff failed 1-th time, retry later wait process cdc.test exit for 2-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... table test.finish_mark not exists for 1-th check, retry later table common_1.finish_mark exists cdc.test: no process found wait process cdc.test exit for 3-th time... 
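The changefeed checks traced earlier (check_changefeed_state for the changefeed-error case, and check_no_changefeed just above) all follow the same shape: query the changefeed through the CLI and pick fields out of the JSON with jq. A compact reconstruction of check_changefeed_state from its set -x trace; the real helper lives in the tests' utility library and may differ in detail (for example it matches the error message with a regex and handles a TLS directory argument):

    # Query a changefeed and assert on its state and error message, as done
    # above for changefeed-error (expected state "warning", message containing
    # "failpoint"). Reconstructed sketch; argument handling is simplified.
    check_changefeed_state() {
        local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
        local info state message
        info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
        state=$(echo "$info" | jq -r .state)
        if [[ $state != "$expected_state" ]]; then
            echo "unexpected state $state, expected $expected_state"
            return 1
        fi
        message=$(echo "$info" | jq -r .error.message)
        if [[ $message != *"$error_msg"* ]]; then
            echo "unexpected error message: $message"
            return 1
        fi
        echo 'run task successfully'
    }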
process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 22:13:46 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.64336435.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff successfully table force_replicate_table.t0 exists table force_replicate_table.t1 not exists for 1-th check, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:13:46 CST 2024] <<<<<< run test case multi_capture success! >>>>>> ***************** properties ***************** "threadcount"="4" "requestdistribution"="uniform" "readproportion"="0" "mysql.port"="4000" "mysql.user"="root" "workload"="core" "insertproportion"="0" "mysql.db"="changefeed_auto_stop_1" "scanproportion"="0" "operationcount"="0" "dotransactions"="false" "readallfields"="true" "recordcount"="20" "mysql.host"="127.0.0.1" "updateproportion"="0" ********************************************** Run finished, takes 9.619433ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3542.8, Avg(us): 1822, Min(us): 1088, Max(us): 3921, 95th(us): 4000, 99th(us): 4000 ***************** properties ***************** "mysql.host"="127.0.0.1" "mysql.user"="root" "updateproportion"="0" "readproportion"="0" "threadcount"="4" "dotransactions"="false" "operationcount"="0" "insertproportion"="0" "recordcount"="20" "workload"="core" "requestdistribution"="uniform" "scanproportion"="0" "mysql.db"="changefeed_auto_stop_2" "mysql.port"="4000" "readallfields"="true" ********************************************** Run finished, takes 8.719827ms INSERT - Takes(s): 0.0, Count: 20, OPS: 4088.9, Avg(us): 1636, Min(us): 937, Max(us): 3761, 95th(us): 4000, 99th(us): 4000 ***************** properties ***************** "threadcount"="4" "mysql.host"="127.0.0.1" "insertproportion"="0" "mysql.user"="root" "workload"="core" "mysql.port"="4000" "readallfields"="true" "scanproportion"="0" "operationcount"="0" "mysql.db"="changefeed_auto_stop_3" "dotransactions"="false" "updateproportion"="0" "recordcount"="20" "readproportion"="0" "requestdistribution"="uniform" ********************************************** Run finished, takes 8.919176ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3981.3, Avg(us): 1692, Min(us): 979, Max(us): 3878, 95th(us): 4000, 99th(us): 4000 check diff failed 4-th time, retry later table 
mark.finish_mark_2 not exists for 2-th check, retry later ***************** properties ***************** "threadcount"="4" "workload"="core" "requestdistribution"="uniform" "insertproportion"="0" "mysql.host"="127.0.0.1" "dotransactions"="false" "updateproportion"="0" "scanproportion"="0" "mysql.db"="changefeed_auto_stop_4" "mysql.port"="4000" "mysql.user"="root" "readproportion"="0" "recordcount"="20" "readallfields"="true" "operationcount"="0" ********************************************** Run finished, takes 9.349416ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3789.5, Avg(us): 1787, Min(us): 1094, Max(us): 4093, 95th(us): 5000, 99th(us): 5000 [Sat May 4 22:13:47 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1253112533.out server --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 1-th check, retry later check diff failed 2-th time, retry later wait process cdc.test exit for 3-th time... table test.finish_mark not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_session_done_during_task/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:13:48 CST 2024] <<<<<< run test case common_1 success! >>>>>> table force_replicate_table.t1 exists table force_replicate_table.t2 not exists for 1-th check, retry later table test.t2 not exists for 2-th check, retry later check diff failed 5-th time, retry later table mark.finish_mark_2 not exists for 3-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:49 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/347b6dea-41bb-42c0-aae8-63eaec72ae8d {"id":"347b6dea-41bb-42c0-aae8-63eaec72ae8d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832026} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6d2b 347b6dea-41bb-42c0-aae8-63eaec72ae8d /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/347b6dea-41bb-42c0-aae8-63eaec72ae8d {"id":"347b6dea-41bb-42c0-aae8-63eaec72ae8d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832026} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6d2b 347b6dea-41bb-42c0-aae8-63eaec72ae8d /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/347b6dea-41bb-42c0-aae8-63eaec72ae8d {"id":"347b6dea-41bb-42c0-aae8-63eaec72ae8d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832026} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6d2b 347b6dea-41bb-42c0-aae8-63eaec72ae8d /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6488.out cli changefeed create --start-ts=449532918127919106 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-1 check diff failed 3-th time, retry later Create changefeed successfully! 
ID: changefeed-error-1 Info: {"upstream_id":7365147271517687926,"namespace":"default","id":"changefeed-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:49.81321832+08:00","start_ts":449532918127919106,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532918127919106,"checkpoint_ts":449532918127919106,"checkpoint_time":"2024-05-04 22:13:13.591"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 3-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:13:50 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true);github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1258812590.out server --log-file 
/tmp/tidb_cdc_test/changefeed_auto_stop/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table force_replicate_table.t2 exists table force_replicate_table.t3 not exists for 1-th check, retry later table test.t2 not exists for 3-th check, retry later check diff successfully + set +x check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed + endpoint=127.0.0.1:8300 + changefeed_id=changefeed-error-1 + expected_state=warning + field=last_warning + error_pattern=ErrExecDDLFailed ++ curl 127.0.0.1:8300/api/v2/changefeeds/changefeed-error-1/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed table mark.finish_mark_2 not exists for 4-th check, retry later 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 253 100 253 0 0 2410 0 --:--:-- --:--:-- --:--:-- 2409 100 253 100 253 0 0 2408 0 --:--:-- --:--:-- --:--:-- 2409 + info='{"state":"warning","resolved_ts":449532918508027933,"checkpoint_ts":449532918508027933,"last_warning":{"time":"2024-05-04T22:13:51.244686576+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' + echo '{"state":"warning","resolved_ts":449532918508027933,"checkpoint_ts":449532918508027933,"last_warning":{"time":"2024-05-04T22:13:51.244686576+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' {"state":"warning","resolved_ts":449532918508027933,"checkpoint_ts":449532918508027933,"last_warning":{"time":"2024-05-04T22:13:51.244686576+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}} ++ jq -r .state ++ echo '{"state":"warning","resolved_ts":449532918508027933,"checkpoint_ts":449532918508027933,"last_warning":{"time":"2024-05-04T22:13:51.244686576+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ jq -r .last_warning.message ++ echo '{"state":"warning","resolved_ts":449532918508027933,"checkpoint_ts":449532918508027933,"last_warning":{"time":"2024-05-04T22:13:51.244686576+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' + error_msg='[CDC:ErrExecDDLFailed]exec DDL failed' + [[ ! [CDC:ErrExecDDLFailed]exec DDL failed =~ ErrExecDDLFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6538.out cli changefeed remove -c changefeed-error-1 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 4-th time, retry later table test.finish_mark not exists for 4-th check, retry later table force_replicate_table.t3 exists table force_replicate_table.t4 not exists for 1-th check, retry later Changefeed remove successfully. ID: changefeed-error-1 CheckpointTs: 449532918508027933 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... check diff failed 1-th time, retry later Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_2 not exists for 5-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/capture_session_done_during_task Starting Upstream PD... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 4-th check, retry later Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:53 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/capture/73ead227-2beb-45be-a24d-d6557c9e2e9f {"id":"73ead227-2beb-45be-a24d-d6557c9e2e9f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832030} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1268 73ead227-2beb-45be-a24d-d6557c9e2e9f /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/capture/73ead227-2beb-45be-a24d-d6557c9e2e9f {"id":"73ead227-2beb-45be-a24d-d6557c9e2e9f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832030} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1268 73ead227-2beb-45be-a24d-d6557c9e2e9f /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + grep -q 'etcd info' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e94b29e-18e9-43ee-aa9f-404eae05e0e4 {"id":"5e94b29e-18e9-43ee-aa9f-404eae05e0e4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832027} /tidb/cdc/default/__cdc_meta__/capture/73ead227-2beb-45be-a24d-d6557c9e2e9f 
{"id":"73ead227-2beb-45be-a24d-d6557c9e2e9f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832030} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1235 5e94b29e-18e9-43ee-aa9f-404eae05e0e4 /tidb/cdc/default/__cdc_meta__/owner/22318f43f3ee1268 73ead227-2beb-45be-a24d-d6557c9e2e9f /tidb/cdc/default/default/upstream/7365147411438037327 {"id":7365147411438037327,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Sat May 4 22:13:53 CST 2024] <<<<<< START kafka consumer in changefeed_auto_stop case >>>>>> check_changefeed_state http://127.0.0.1:2379 2de772c4-c047-4aaa-a3e1-56fcc45fc92c normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=2de772c4-c047-4aaa-a3e1-56fcc45fc92c + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 2de772c4-c047-4aaa-a3e1-56fcc45fc92c -s + info='{ "upstream_id": 7365147411438037327, "namespace": "default", "id": "2de772c4-c047-4aaa-a3e1-56fcc45fc92c", "state": "normal", "checkpoint_tso": 449532926682464259, "checkpoint_time": "2024-05-04 22:13:46.224", "error": null }' + echo '{ "upstream_id": 7365147411438037327, "namespace": "default", "id": "2de772c4-c047-4aaa-a3e1-56fcc45fc92c", "state": "normal", "checkpoint_tso": 449532926682464259, "checkpoint_time": "2024-05-04 22:13:46.224", "error": null }' { "upstream_id": 7365147411438037327, "namespace": "default", "id": "2de772c4-c047-4aaa-a3e1-56fcc45fc92c", "state": "normal", "checkpoint_tso": 449532926682464259, "checkpoint_time": "2024-05-04 22:13:46.224", "error": null } ++ echo '{' '"upstream_id":' 7365147411438037327, '"namespace":' '"default",' '"id":' '"2de772c4-c047-4aaa-a3e1-56fcc45fc92c",' '"state":' '"normal",' '"checkpoint_tso":' 449532926682464259, '"checkpoint_time":' '"2024-05-04' '22:13:46.224",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147411438037327, '"namespace":' '"default",' '"id":' '"2de772c4-c047-4aaa-a3e1-56fcc45fc92c",' '"state":' '"normal",' '"checkpoint_tso":' 449532926682464259, '"checkpoint_time":' '"2024-05-04' '22:13:46.224",' '"error":' null '}' ++ jq -r .error.message + set +x + message=null + [[ ! null =~ null ]] run task successfully table changefeed_auto_stop_1.usertable not exists for 1-th check, retry later table force_replicate_table.t4 exists table force_replicate_table.t5 not exists for 1-th check, retry later check diff failed 5-th time, retry later wait process cdc.test exit for 1-th time... table test.finish_mark not exists for 5-th check, retry later wait process cdc.test exit for 2-th time... check diff failed 2-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 3-th time... 
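Note: check_changefeed_state in the trace above verifies a changefeed through the cdc CLI and jq: it queries the changefeed, extracts .state, and optionally matches .error.message against a pattern. Below is a condensed sketch of that assertion, assuming cdc and jq are on PATH; it is simplified from the trace, and the real helper also takes a tls_dir argument and branches on https endpoints.

    # Assert that a changefeed is in the expected state and, optionally, carries a matching error.
    check_changefeed_state() {
        local pd=$1 changefeed_id=$2 expected_state=$3 error_pattern=$4
        local info state message
        info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
        state=$(echo "$info" | jq -r .state)
        if [ "$state" != "$expected_state" ]; then
            echo "unexpected state: got '$state', want '$expected_state'" >&2
            return 1
        fi
        message=$(echo "$info" | jq -r .error.message)
        # error_pattern is a regex; callers must escape metacharacters such as '[' (as the trace does).
        if [ -n "$error_pattern" ] && [[ ! "$message" =~ $error_pattern ]]; then
            echo "unexpected error message: $message" >&2
            return 1
        fi
        echo "run task successfully"
    }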
process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 22:13:55 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectActualGCSafePoint=return(9223372036854775807)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.66026604.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 5-th check, retry later table changefeed_auto_stop_1.usertable not exists for 2-th check, retry later table mark.finish_mark_2 not exists for 6-th check, retry later check diff failed 6-th time, retry later table test.finish_mark not exists for 6-th check, retry later table force_replicate_table.t5 exists table force_replicate_table.t6 not exists for 1-th check, retry later table test.t2 not exists for 6-th check, retry later check diff failed 3-th time, retry later table changefeed_auto_stop_1.usertable exists ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_2 not exists for 7-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_auto_stop_2.usertable not exists for 1-th check, retry later check diff successfully [2024/05/04 22:13:50.793 +08:00] [INFO] [main.go:86] ["running ddl test: 0 createDropSchemaDDL"] [2024/05/04 22:13:50.991 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:13:50.991 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/05/04 22:13:51.447 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:13:51.449 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/05/04 22:13:51.818 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:13:51.824 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:13:51.908 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:13:52.293 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] [2024/05/04 22:13:52.671 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:13:52.681 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:13:53.136 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/05/04 22:13:53.190 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:13:53.197 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [2024/05/04 22:13:53.627 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/05/04 22:13:53.638 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/05/04 22:13:53.680 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/05/04 22:13:54.137 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/05/04 22:13:54.173 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:13:54.179 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/05/04 22:13:54.648 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:13:54.659 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/05/04 22:13:54.660 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/05/04 22:13:55.147 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/05/04 22:13:55.150 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/05/04 22:13:55.157 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [2024/05/04 22:13:55.620 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/05/04 22:13:55.632 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:13:55.637 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [mysql] 2024/05/04 22:13:55 connection.go:299: invalid connection table test.finish_mark not exists for 7-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:13:58 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/834b2615-5d27-4228-9173-d1f4ab1c860b {"id":"834b2615-5d27-4228-9173-d1f4ab1c860b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832035} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6daa 834b2615-5d27-4228-9173-d1f4ab1c860b /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/834b2615-5d27-4228-9173-d1f4ab1c860b {"id":"834b2615-5d27-4228-9173-d1f4ab1c860b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832035} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6daa 834b2615-5d27-4228-9173-d1f4ab1c860b /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/834b2615-5d27-4228-9173-d1f4ab1c860b {"id":"834b2615-5d27-4228-9173-d1f4ab1c860b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832035} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6daa 834b2615-5d27-4228-9173-d1f4ab1c860b /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6650.out cli changefeed create --start-ts=449532918127919106 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-2 wait process cdc.test exit for 1-th time... 
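Note: every "START cdc server" block above uses the same readiness probe: cdc.test is launched in the background, and the script polls http://<addr>/debug/info with basic auth until the body contains "etcd info" (the capture has registered in etcd), failing if "failed to get info:" appears or 50 attempts are exhausted. A stripped-down sketch of that loop, with the credentials and timeouts copied from the trace; treat it as illustrative rather than the harness's exact helper.

    # Wait for a TiCDC server to become ready by polling its /debug/info endpoint.
    wait_cdc_ready() {
        local addr=${1:-127.0.0.1:8300}
        local i res
        for i in $(seq 0 50); do
            res=$(curl -vsL --max-time 20 "http://$addr/debug/info" --user ticdc:ticdc_secret)
            if echo "$res" | grep -q 'failed to get info:'; then
                echo "cdc server at $addr reported an error" >&2
                return 1
            fi
            if echo "$res" | grep -q 'etcd info'; then
                return 0
            fi
            if [ "$i" -eq 50 ]; then
                return 1
            fi
            sleep 3
        done
    }

In this log the probe runs against 127.0.0.1:8300 for the changefeed_error case and against 127.0.0.1:8301 and 8302 for the two captures in the changefeed_auto_stop case.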
table force_replicate_table.t6 exists check_data_subset force_replicate_table.t0 127.0.0.1 4000 127.0.0.1 3306 Create changefeed successfully! ID: changefeed-error-2 Info: {"upstream_id":7365147271517687926,"namespace":"default","id":"changefeed-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:13:58.808685056+08:00","start_ts":449532918127919106,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532918127919106,"checkpoint_ts":449532918127919106,"checkpoint_time":"2024-05-04 22:13:13.591"} PASS =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo The 1 times to try to start tidb cluster... coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 2-th time... run task successfully check_data_subset force_replicate_table.t1 127.0.0.1 4000 127.0.0.1 3306 table test.t2 not exists for 7-th check, retry later check diff failed 4-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:13:59 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
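Note: the changefeed_error and kafka_sink_error_resume cases drive their failure scenarios by exporting GO_FAILPOINTS before starting cdc.test; the value is a semicolon-separated list of failpoint-path=expression pairs such as return(true), 1*return(true) or sleep(1000), all visible in the traces above. The "failed" state with ErrSnapshotLostByGC checked just below is produced by the InjectActualGCSafePoint failpoint set on the restarted server. A minimal sketch of such a start is shown here, with flags and a failpoint path copied from the log; the real invocation also passes -test.coverprofile for coverage collection.

    # Start the TiCDC test binary with a failpoint enabled (illustrative sketch).
    export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)'
    cdc.test server \
        --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log \
        --log-level debug \
        --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data \
        --cluster-id default &

    # Several failpoints can be combined with ';', e.g. the changefeed_auto_stop case uses:
    #   github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true)
    #   github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)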
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully check_data_subset force_replicate_table.t2 127.0.0.1 4000 127.0.0.1 3306 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_auto_stop_2.usertable exists table changefeed_auto_stop_3.usertable exists table changefeed_auto_stop_4.usertable not exists for 1-th check, retry later table test.finish_mark not exists for 8-th check, retry later table mark.finish_mark_2 not exists for 8-th check, retry later + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error-2 failed [CDC:ErrSnapshotLostByGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error-2 + expected_state=failed + error_msg='[CDC:ErrSnapshotLostByGC]' + tls_dir='[CDC:ErrSnapshotLostByGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error-2 -s run task successfully check_data_subset force_replicate_table.t3 127.0.0.1 4000 127.0.0.1 3306 + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:58.892825932+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at 9223372036854775807" } }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:58.892825932+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at 9223372036854775807" } }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449532918127919106, "checkpoint_time": "2024-05-04 22:13:13.591", "error": { "time": "2024-05-04T22:13:58.892825932+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at 9223372036854775807" } } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449532918127919106, '"checkpoint_time":' '"2024-05-04' '22:13:13.591",' '"error":' '{' '"time":' '"2024-05-04T22:13:58.892825932+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' ++ jq -r .state + state=failed + [[ ! 
failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449532918127919106, '"checkpoint_time":' '"2024-05-04' '22:13:13.591",' '"error":' '{' '"time":' '"2024-05-04T22:13:58.892825932+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' + message='[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at 9223372036854775807' + [[ ! [CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449532918127919106 is earlier than or equal to GC safepoint at 9223372036854775807 =~ \[CDC:ErrSnapshotLostByGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6734.out cli changefeed remove -c changefeed-error-2 \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } table test.t2 not exists for 8-th check, retry later run task successfully check_data_subset force_replicate_table.t4 127.0.0.1 4000 127.0.0.1 3306 [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } check diff failed 5-th time, retry later [2024/05/04 22:14:00.874 +08:00] [INFO] [main.go:86] ["running ddl test: 1 truncateDDL"] [2024/05/04 22:14:01.166 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/05/04 22:14:01.169 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:14:01.334 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:14:01.342 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:14:01.361 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:14:01.363 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [Pipeline] // withEnv [Pipeline] } Changefeed remove successfully. ID: changefeed-error-2 CheckpointTs: 449532918127919106 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd16894000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:9363, start at 2024-05-04 22:13:58.44844534 +0800 CST m=+5.359286937 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:58.458 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:58.437 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:58.437 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd16894000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:9363, start at 2024-05-04 22:13:58.44844534 +0800 CST m=+5.359286937 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:58.458 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:58.437 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:58.437 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd16a18000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:9438, start at 2024-05-04 22:13:58.548698158 +0800 CST m=+5.406560784 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:58.556 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:58.534 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:58.534 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // node [2024/05/04 22:14:01.497 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] [2024/05/04 22:14:01.540 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:14:01.676 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:14:01.684 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // podTemplate table changefeed_auto_stop_4.usertable exists [Pipeline] } [Pipeline] // withEnv [2024/05/04 22:14:01.722 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:14:01.725 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:14:01.847 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/05/04 22:14:01.900 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [Pipeline] } table test.finish_mark not exists for 9-th check, retry later [Pipeline] // stage check diff failed 1-th time, retry later [Pipeline] } run task successfully check_data_subset force_replicate_table.t5 127.0.0.1 4000 127.0.0.1 3306 table mark.finish_mark_2 not exists for 9-th check, retry later [2024/05/04 22:14:02.022 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:14:02.033 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/05/04 22:14:02.084 +08:00] [INFO] [main.go:220] ["0 
insert success: 600"] [2024/05/04 22:14:02.090 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:02.269 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/05/04 22:14:02.296 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/05/04 22:14:02.449 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:14:02.464 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/05/04 22:14:02.485 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] run task successfully check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 id=19,a=NULL doesn't exist in downstream table force_replicate_table.t6 run task failed 1-th time, retry later [2024/05/04 22:14:02.499 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/05/04 22:14:02.622 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [2024/05/04 22:14:02.681 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] + set +x [2024/05/04 22:14:02.803 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:14:02.822 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [2024/05/04 22:14:02.864 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/05/04 22:14:02.882 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] [2024/05/04 22:14:02.970 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [2024/05/04 22:14:03.059 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/05/04 22:14:03.147 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/05/04 22:14:03.171 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [2024/05/04 22:14:03.258 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/05/04 22:14:03.284 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/05/04 22:14:03.326 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] [2024/05/04 22:14:03.460 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/05/04 22:14:03.501 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] wait process cdc.test exit for 1-th time... [Sat May 4 22:14:03 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1082110823.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 9-th check, retry later [2024/05/04 22:14:03.531 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/05/04 22:14:03.656 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/05/04 22:14:03.683 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/05/04 22:14:03.684 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] wait process cdc.test exit for 2-th time... check diff successfully [2024/05/04 22:14:03.855 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] [2024/05/04 22:14:03.860 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/05/04 22:14:03.885 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] table mark.finish_mark_2 not exists for 10-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:04.030 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/05/04 22:14:04.046 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/05/04 22:14:04.077 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/05/04 22:14:04.225 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/05/04 22:14:04.239 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/05/04 22:14:04.262 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:04 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.67906792.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:14:04.405 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] [2024/05/04 22:14:04.438 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/05/04 22:14:04.471 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] table test.finish_mark not exists for 10-th check, retry later check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 check diff failed 2-th time, retry later [2024/05/04 22:14:04.583 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"] [2024/05/04 22:14:04.628 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"] [2024/05/04 22:14:04.628 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] [2024/05/04 22:14:04.795 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"] [2024/05/04 22:14:04.852 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"] [2024/05/04 22:14:04.891 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"] [2024/05/04 22:14:05.051 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"] check diff failed 1-th time, retry later [2024/05/04 22:14:05.071 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"] [2024/05/04 22:14:05.122 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"] [2024/05/04 22:14:05.243 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"] [2024/05/04 22:14:05.249 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"] [2024/05/04 22:14:05.289 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"] run task successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:05.434 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"] [2024/05/04 22:14:05.439 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"] [2024/05/04 22:14:05.492 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"] table test.t2 not exists for 10-th check, retry later [2024/05/04 22:14:05.617 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"] [2024/05/04 22:14:05.627 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"] [2024/05/04 22:14:05.670 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"] [2024/05/04 22:14:05.801 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"] [2024/05/04 22:14:05.807 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"] table mark.finish_mark_2 not exists for 11-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 11-th check, retry later check diff successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:06 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1a8fe960-ed64-49cf-99ff-51ba826f171d {"id":"1a8fe960-ed64-49cf-99ff-51ba826f171d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832043} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f42f5acd 1a8fe960-ed64-49cf-99ff-51ba826f171d /tidb/cdc/default/default/upstream/7365147486131195233 {"id":7365147486131195233,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1a8fe960-ed64-49cf-99ff-51ba826f171d {"id":"1a8fe960-ed64-49cf-99ff-51ba826f171d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832043} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f42f5acd 1a8fe960-ed64-49cf-99ff-51ba826f171d /tidb/cdc/default/default/upstream/7365147486131195233 {"id":7365147486131195233,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1a8fe960-ed64-49cf-99ff-51ba826f171d {"id":"1a8fe960-ed64-49cf-99ff-51ba826f171d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832043} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f42f5acd 1a8fe960-ed64-49cf-99ff-51ba826f171d /tidb/cdc/default/default/upstream/7365147486131195233 {"id":7365147486131195233,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:14:06 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>> cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:06 CST 2024] <<<<<< run test case force_replicate_table success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd1d9cc0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:7264, start at 2024-05-04 22:14:05.715678237 +0800 CST m=+24.429345044 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:05.725 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:05.683 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:05.683 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd1d9cc0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:7264, start at 2024-05-04 22:14:05.715678237 +0800 CST m=+24.429345044 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:05.725 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:05.683 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:05.683 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd0cf300017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-jvsw5-4r66k, pid:7342, start at 2024-05-04 22:13:48.648753987 +0800 CST m=+7.315757413 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:15:48.657 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:13:48.620 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:03:48.620 +0800 All versions after safe point can be accessed. (DO NOT EDIT) table test.t2 not exists for 11-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:07 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/394eff6e-ac9e-4ac4-967c-5275dd1f86a2 {"id":"394eff6e-ac9e-4ac4-967c-5275dd1f86a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832044} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6e0c 394eff6e-ac9e-4ac4-967c-5275dd1f86a2 /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/394eff6e-ac9e-4ac4-967c-5275dd1f86a2 {"id":"394eff6e-ac9e-4ac4-967c-5275dd1f86a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832044} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6e0c 394eff6e-ac9e-4ac4-967c-5275dd1f86a2 /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/394eff6e-ac9e-4ac4-967c-5275dd1f86a2 
{"id":"394eff6e-ac9e-4ac4-967c-5275dd1f86a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832044} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f43f36d6e0c 394eff6e-ac9e-4ac4-967c-5275dd1f86a2 /tidb/cdc/default/default/upstream/7365147271517687926 {"id":7365147271517687926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6842.out cli changefeed create --start-ts=0 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-initialize-error Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 2-th time, retry later Create changefeed successfully! 
ID: changefeed-initialize-error Info: {"upstream_id":7365147271517687926,"namespace":"default","id":"changefeed-initialize-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:14:07.815303121+08:00","start_ts":449532932309909506,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532932309909506,"checkpoint_ts":449532932309909506,"checkpoint_time":"2024-05-04 22:14:07.691"} PASS wait process cdc.test exit for 3-th time... coverage: 2.4% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:14:08 CST 2024] <<<<<< run test case changefeed_auto_stop success! >>>>>> table test.finish_mark not exists for 12-th check, retry later table mark.finish_mark_2 not exists for 12-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd1c9740017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:21437, start at 2024-05-04 22:14:04.681820856 +0800 CST m=+5.048486746 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:04.689 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:04.687 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:04.687 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd1c9740017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:21437, start at 2024-05-04 22:14:04.681820856 +0800 CST m=+5.048486746 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:04.689 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:04.687 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:04.687 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd1cbc00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:21523, start at 2024-05-04 22:14:04.827917414 +0800 CST m=+5.144616950 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:04.835 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:04.834 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:04.834 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 640 0 --:--:-- --:--:-- --:--:-- 641 + info='{"state":"normal","resolved_ts":449532932217372689,"checkpoint_ts":449532932217372689}' + echo '{"state":"normal","resolved_ts":449532932217372689,"checkpoint_ts":449532932217372689}' {"state":"normal","resolved_ts":449532932217372689,"checkpoint_ts":449532932217372689} ++ echo '{"state":"normal","resolved_ts":449532932217372689,"checkpoint_ts":449532932217372689}' ++ jq -r .state + state=normal + [[ ! 
normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later table test.t2 not exists for 12-th check, retry later + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 3-th time, retry later + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449532932309909506, "checkpoint_time": "2024-05-04 22:14:07.691", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449532932309909506, "checkpoint_time": "2024-05-04 22:14:07.691", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449532932309909506, "checkpoint_time": "2024-05-04 22:14:07.691", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449532932309909506, '"checkpoint_time":' '"2024-05-04' '22:14:07.691",' '"error":' '{' '"time":' '"2024-05-04T22:14:07.997613408+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later table test.finish_mark not exists for 13-th check, retry later [Sat May 4 22:14:09 CST 2024] <<<<<< START cdc server in multi_rocks case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.1998419986.out server --log-file /tmp/tidb_cdc_test/multi_rocks/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_rocks/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table mark.finish_mark_2 not exists for 13-th check, retry later check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 569 0 --:--:-- --:--:-- --:--:-- 573 + info='{"state":"normal","resolved_ts":449532932951113733,"checkpoint_ts":449532932348182562}' + echo '{"state":"normal","resolved_ts":449532932951113733,"checkpoint_ts":449532932348182562}' {"state":"normal","resolved_ts":449532932951113733,"checkpoint_ts":449532932348182562} ++ echo '{"state":"normal","resolved_ts":449532932951113733,"checkpoint_ts":449532932348182562}' ++ jq -r .state + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 2-th time, retry later [2024/05/04 22:14:11.058 +08:00] [INFO] [main.go:86] ["running ddl test: 2 addDropColumnDDL"] table test.t2 exists [2024/05/04 22:14:11.247 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:14:11.253 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
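The check_changefeed_status failures above belong to the kafka_sink_error_resume case: a failpoint (GO_FAILPOINTS=...KafkaSinkAsyncSendError=1*return(true), set when that cdc server was started) injects a sink error, and the helper polls the changefeed's HTTP status endpoint until the state flips to warning with a matching last_warning message. A minimal sketch of that polling pattern follows; the wait_status name, retry count and sleep interval are illustrative, only the endpoint, the jq fields and the "retry later" behaviour are taken from the traces above.

# Sketch of the status polling seen in check_changefeed_status (illustrative helper name).
wait_status() {
    local endpoint=$1 changefeed=$2 expected=$3 pattern=$4
    for i in 1 2 3 4 5; do
        info=$(curl -s "$endpoint/api/v2/changefeeds/$changefeed/status")
        state=$(echo "$info" | jq -r .state)
        msg=$(echo "$info" | jq -r .last_warning.message)
        if [ "$state" == "$expected" ] && [[ "$msg" =~ $pattern ]]; then
            echo "run task successfully"
            return 0
        fi
        echo "changefeed state $state does not equal to $expected, retry later"
        sleep 2
    done
    return 1
}
# e.g. wait_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def warning 'kafka sink injected error'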
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully check diff failed 4-th time, retry later check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s [2024/05/04 22:14:11.408 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:14:11.410 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/05/04 22:14:11.419 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:14:11.421 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:14:11.566 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:14:11.581 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.cli.23004.out cli tso query --pd=http://127.0.0.1:2379 + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6958.out cli changefeed pause -c changefeed-initialize-error [2024/05/04 22:14:11.730 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:14:11.732 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:14:11.762 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:14:11.764 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
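check_changefeed_state, which succeeds above for changefeed-initialize-error, is the CLI counterpart of the same idea: instead of the HTTP API it shells out to cdc cli changefeed query and inspects .state and .error.message, retrying on mismatch just like the sketch above. Roughly, using the PD address from the traces:

PD=http://127.0.0.1:2379
info=$(cdc cli changefeed query --pd=$PD -c changefeed-initialize-error -s)
state=$(echo "$info" | jq -r .state)              # e.g. normal / warning / stopped
message=$(echo "$info" | jq -r .error.message)    # null while the changefeed is healthy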
table test.finish_mark exists [2024/05/04 22:14:11.911 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/05/04 22:14:11.937 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/05/04 22:14:12.094 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] wait process cdc.test exit for 1-th time... table mark.finish_mark_2 exists table mark.finish_mark_3 not exists for 1-th check, retry later check diff successfully [2024/05/04 22:14:12.096 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/05/04 22:14:12.133 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:14:12.135 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/05/04 22:14:12.276 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/05/04 22:14:12.312 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] wait process cdc.test exit for 2-th time... PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:14:12.455 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:14:12.457 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/05/04 22:14:12.496 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/05/04 22:14:12.499 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:12 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f855ad7e-33a2-478f-8465-ef4ba4120a62 {"id":"f855ad7e-33a2-478f-8465-ef4ba4120a62","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832050} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f40716fd f855ad7e-33a2-478f-8465-ef4ba4120a62 /tidb/cdc/default/default/upstream/7365147438410010153 {"id":7365147438410010153,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f855ad7e-33a2-478f-8465-ef4ba4120a62 {"id":"f855ad7e-33a2-478f-8465-ef4ba4120a62","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832050} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f40716fd f855ad7e-33a2-478f-8465-ef4ba4120a62 /tidb/cdc/default/default/upstream/7365147438410010153 {"id":7365147438410010153,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: 
*** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f855ad7e-33a2-478f-8465-ef4ba4120a62 {"id":"f855ad7e-33a2-478f-8465-ef4ba4120a62","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832050} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f40716fd f855ad7e-33a2-478f-8465-ef4ba4120a62 /tidb/cdc/default/default/upstream/7365147438410010153 {"id":7365147438410010153,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.cli.20942.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-rocks-test-32388?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [2024/05/04 22:14:12.624 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [2024/05/04 22:14:12.672 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/05/04 22:14:12.796 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:14:12.798 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [2024/05/04 22:14:12.847 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/05/04 22:14:12.849 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:12 CST 2024] <<<<<< run test case resolve_lock success! >>>>>> [2024/05/04 22:14:12.963 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [2024/05/04 22:14:13.021 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! 
ID: 6d22731f-5108-4561-9b9e-b07b9229ae06 Info: {"upstream_id":7365147438410010153,"namespace":"default","id":"6d22731f-5108-4561-9b9e-b07b9229ae06","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-rocks-test-32388?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:14:13.23660267+08:00","start_ts":449532933723389954,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532933723389954,"checkpoint_ts":449532933723389954,"checkpoint_time":"2024-05-04 22:14:13.083"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:14:13.135 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/05/04 22:14:13.137 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [2024/05/04 22:14:13.202 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/05/04 22:14:13.204 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/05/04 22:14:13.311 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] [2024/05/04 22:14:13.382 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/05/04 22:14:13.501 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/05/04 22:14:13.503 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/05/04 22:14:13.580 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/05/04 22:14:13.582 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] + set +x + tso='449532933462556673 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532933462556673 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 22:14:13 CST 2024] <<<<<< START cdc server in capture_session_done_during_task case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.2305723059.out server --log-file /tmp/tidb_cdc_test/capture_session_done_during_task/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/capture_session_done_during_task/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error stopped changefeed new redo manager injected error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=stopped + error_msg=changefeed + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s [2024/05/04 22:14:13.691 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/05/04 22:14:13.766 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] [2024/05/04 22:14:13.870 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/05/04 22:14:13.872 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": { "time": "2024-05-04T22:14:07.997613408+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' '{' '"time":' '"2024-05-04T22:14:07.997613408+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' 
'}' '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' '{' '"time":' '"2024-05-04T22:14:07.997613408+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .error.message + message='changefeed new redo manager injected error' + [[ ! changefeed new redo manager injected error =~ changefeed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7034.out cli changefeed resume -c changefeed-initialize-error check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 2-th check, retry later [2024/05/04 22:14:13.944 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/05/04 22:14:13.945 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/05/04 22:14:14.056 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/05/04 22:14:14.120 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/05/04 22:14:14.231 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/05/04 22:14:14.233 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/05/04 22:14:14.296 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/05/04 22:14:14.298 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] PASS [2024/05/04 22:14:14.402 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] [2024/05/04 22:14:14.469 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] [2024/05/04 22:14:14.572 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"] [2024/05/04 22:14:14.573 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"] [2024/05/04 22:14:14.632 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"] [2024/05/04 22:14:14.634 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"] coverage: 2.1% of statements in github.com/pingcap/tiflow/... 
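Pulled together, the changefeed_error steps above walk the changefeed through its whole lifecycle against the local Kafka broker: create with an explicit sink URI, pause, resume, and finally remove. A condensed sketch of those commands is below; the sink URI, changefeed id and PD address are the ones used by this run (the traces invoke them through cdc.test with a coverage profile, and rely on the default PD, so --pd is added here only for completeness).

PD=http://127.0.0.1:2379
SINK='kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'

cdc cli changefeed create --pd=$PD --start-ts=0 --sink-uri="$SINK" -c changefeed-initialize-error
cdc cli changefeed pause  --pd=$PD -c changefeed-initialize-error
cdc cli changefeed resume --pd=$PD -c changefeed-initialize-error
cdc cli changefeed query  --pd=$PD -c changefeed-initialize-error -s   # prints {"state": ..., "error": ...}
cdc cli changefeed remove --pd=$PD -c changefeed-initialize-error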
[2024/05/04 22:14:14.751 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"] [2024/05/04 22:14:14.809 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"] check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2211 0 --:--:-- --:--:-- --:--:-- 2218 + info='{"state":"warning","resolved_ts":449532933999689735,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532933999689735,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532933999689735,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532933999689735,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449532933999689735,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .last_warning.message + error_msg='kafka sink injected error' + [[ ! kafka sink injected error =~ kafka ]] run task successfully start tidb cluster in /tmp/tidb_cdc_test/partition_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:14.932 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"] [2024/05/04 22:14:14.934 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"] [2024/05/04 22:14:14.974 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"] [2024/05/04 22:14:14.976 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"] [2024/05/04 22:14:15.105 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"] [2024/05/04 22:14:15.138 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"] + set +x [Sat May 4 22:14:14 CST 2024] <<<<<< START kafka consumer in multi_rocks case >>>>>> [2024/05/04 22:14:15.282 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"] [2024/05/04 22:14:15.284 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"] [2024/05/04 22:14:15.315 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"] [2024/05/04 22:14:15.316 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"] check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed [2024/05/04 22:14:15.473 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"] [2024/05/04 22:14:15.504 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"] [2024/05/04 22:14:15.664 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"] [2024/05/04 22:14:15.667 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"] 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 1833 0 --:--:-- --:--:-- --:--:-- 1848 + info='{"state":"warning","resolved_ts":449532934261833736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532934261833736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532934261833736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532934261833736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later [2024/05/04 22:14:15.702 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"] [2024/05/04 22:14:15.704 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"] [2024/05/04 22:14:15.851 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"] [2024/05/04 22:14:15.887 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"] check diff successfully table mark.finish_mark_3 not exists for 3-th check, retry later ***************** properties ***************** "mysql.port"="4000" "dotransactions"="false" "table"="a1" "mysql.user"="root" "operationcount"="0" "mysql.db"="multi_rocks" "updateproportion"="0" "recordcount"="1000" "readallfields"="true" "mysql.host"="127.0.0.1" "scanproportion"="0" "requestdistribution"="uniform" "workload"="core" "threadcount"="2" "insertproportion"="0" "readproportion"="0" ********************************************** + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s [2024/05/04 22:14:16.041 +08:00] [INFO] [main.go:220] ["0 insert success: 2800"] [2024/05/04 22:14:16.043 +08:00] [INFO] [main.go:234] ["0 delete success: 1400"] + info='{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null }' + echo '{ "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null }' { "upstream_id": 7365147271517687926, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449532933188091908, "checkpoint_time": "2024-05-04 22:14:11.041", "error": null } ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147271517687926, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449532933188091908, '"checkpoint_time":' '"2024-05-04' '22:14:11.041",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7125.out cli changefeed remove -c changefeed-initialize-error Run finished, takes 637.704955ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1599.4, Avg(us): 1242, Min(us): 919, Max(us): 12378, 95th(us): 2000, 99th(us): 2000 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:16 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/775d725f-cb77-48a6-9b35-d985bba565b4 {"id":"775d725f-cb77-48a6-9b35-d985bba565b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832053} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f446fdf9 775d725f-cb77-48a6-9b35-d985bba565b4 /tidb/cdc/default/default/upstream/7365147516106888906 {"id":7365147516106888906,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/775d725f-cb77-48a6-9b35-d985bba565b4 {"id":"775d725f-cb77-48a6-9b35-d985bba565b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832053} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f446fdf9 775d725f-cb77-48a6-9b35-d985bba565b4 /tidb/cdc/default/default/upstream/7365147516106888906 {"id":7365147516106888906,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/775d725f-cb77-48a6-9b35-d985bba565b4 {"id":"775d725f-cb77-48a6-9b35-d985bba565b4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832053} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f446fdf9 775d725f-cb77-48a6-9b35-d985bba565b4 /tidb/cdc/default/default/upstream/7365147516106888906 {"id":7365147516106888906,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x ***************** properties ***************** "updateproportion"="0" "insertproportion"="0" "dotransactions"="false" "readallfields"="true" "mysql.user"="root" "mysql.db"="multi_rocks" "threadcount"="2" "readproportion"="0" "workload"="core" "mysql.port"="4000" "table"="a2" "scanproportion"="0" "operationcount"="0" "requestdistribution"="uniform" "recordcount"="1000" "mysql.host"="127.0.0.1" ********************************************** Changefeed remove successfully. 
ID: changefeed-initialize-error CheckpointTs: 449532934498811906 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-8220?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd27ff80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:10702, start at 2024-05-04 22:14:16.368452446 +0800 CST m=+5.357598010 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:16.375 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:16.368 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:16.368 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed check diff failed 1-th time, retry later Run finished, takes 627.893367ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1638.4, Avg(us): 1222, Min(us): 878, Max(us): 17474, 95th(us): 2000, 99th(us): 2000 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2235 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449532934786121736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532934786121736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532934786121736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532934786121736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 2-th time, retry later ***************** properties ***************** "requestdistribution"="uniform" "updateproportion"="0" "threadcount"="2" "operationcount"="0" "mysql.user"="root" "table"="a3" "insertproportion"="0" "readproportion"="0" "readallfields"="true" "workload"="core" "recordcount"="1000" "dotransactions"="false" "mysql.db"="multi_rocks" "mysql.host"="127.0.0.1" "scanproportion"="0" "mysql.port"="4000" ********************************************** [Sat May 4 22:14:17 CST 2024] <<<<<< START kafka consumer in capture_session_done_during_task case >>>>>> lease 22318f43f446fdf9 revoked table mark.finish_mark_3 not exists for 4-th check, retry later + set +x table test.finish_mark not exists for 2-th check, retry later \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) Run finished, takes 631.346882ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1627.1, Avg(us): 1229, Min(us): 876, Max(us): 16702, 95th(us): 2000, 99th(us): 2000 [Pipeline] // cache [Pipeline] } table capture_session_done_during_task.t exists check diff failed 1-th time, retry later [Pipeline] // dir Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 1-th time... [Pipeline] } ***************** properties ***************** "dotransactions"="false" "insertproportion"="0" "recordcount"="1000" "operationcount"="0" "threadcount"="2" "mysql.port"="4000" "mysql.user"="root" "scanproportion"="0" "updateproportion"="0" "readallfields"="true" "workload"="core" "readproportion"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "mysql.db"="multi_rocks" "table"="a4" ********************************************** [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd27ff80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:10702, start at 2024-05-04 22:14:16.368452446 +0800 CST m=+5.357598010 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:16.375 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:16.368 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:16.368 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd2814c0003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:10781, start at 2024-05-04 22:14:16.406888661 +0800 CST m=+5.330691943 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:16.415 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:16.403 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:16.403 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
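The "run task failed N-th time, retry later" lines scattered through this stretch come from a retry wrapper around checks such as check_changefeed_status: the warning state is expected to clear on its own once the injected error stops firing, so the check is simply re-run until it passes or a retry budget runs out. A minimal sketch of such a wrapper follows; the helper name, retry count, and sleep interval are assumptions, not the exact tiflow utility.

# Assumed retry wrapper; the actual helper in the test suite may differ.
retry_task() {
    local retries=$1
    shift
    local i
    for ((i = 1; i <= retries; i++)); do
        if "$@"; then
            echo "run task successfully"
            return 0
        fi
        echo "run task failed $i-th time, retry later"
        sleep 2
    done
    echo "run task failed after $retries times"
    return 1
}
# Example: retry_task 10 check_changefeed_status 127.0.0.1:8300 "$changefeed_id" normal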
Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // stage [Pipeline] } check diff failed 2-th time, retry later [Pipeline] // container [Pipeline] } wait process cdc.test exit for 2-th time... [Pipeline] // withEnv [Pipeline] } Run finished, takes 611.861257ms INSERT - Takes(s): 0.6, Count: 999, OPS: 1690.3, Avg(us): 1172, Min(us): 846, Max(us): 20777, 95th(us): 2000, 99th(us): 2000 [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } \033[0;36m<<< Run all test success >>>\033[0m table mark.finish_mark_3 not exists for 5-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:20 CST 2024] <<<<<< run test case changefeed_error success! >>>>>> [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) ***************** properties ***************** "workload"="core" "readproportion"="0" "mysql.port"="4000" "insertproportion"="0" "updateproportion"="0" "table"="a5" "operationcount"="0" "recordcount"="1000" "scanproportion"="0" "mysql.host"="127.0.0.1" "requestdistribution"="uniform" "threadcount"="2" "dotransactions"="false" "readallfields"="true" "mysql.user"="root" "mysql.db"="multi_rocks" ********************************************** [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } table test.finish_mark not exists for 3-th check, retry later [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } Run finished, takes 605.064106ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1696.7, Avg(us): 1176, Min(us): 873, Max(us): 15621, 95th(us): 2000, 99th(us): 2000 + cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12129.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] // stage [2024/05/04 22:14:21.247 +08:00] [INFO] [main.go:86] ["running ddl test: 3 addDropColumnDDL2"] [Pipeline] } check diff failed 3-th time, retry later check diff failed 2-th time, retry later [2024/05/04 22:14:21.449 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:14:21.449 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/05/04 22:14:21.619 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:14:21.621 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:14:21.621 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/05/04 22:14:21.623 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:14:21.802 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:14:21.802 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2221 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449532935834697736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532935834697736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532935834697736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532935834697736,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 3-th time, retry later [2024/05/04 22:14:21.984 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:14:21.986 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:14:21.988 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:14:21.990 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [2024/05/04 22:14:22.158 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/05/04 22:14:22.161 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] table mark.finish_mark_3 not exists for 6-th check, retry later [2024/05/04 22:14:22.338 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/05/04 22:14:22.340 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/05/04 22:14:22.343 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:14:22.345 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] table test.finish_mark not exists for 4-th check, retry later [2024/05/04 22:14:22.513 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/05/04 22:14:22.514 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/05/04 22:14:22.696 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/05/04 22:14:22.698 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/05/04 22:14:22.702 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:14:22.704 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] table multi_rocks.finish_mark not exists for 1-th check, retry later [2024/05/04 22:14:22.869 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/05/04 22:14:22.880 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:23.036 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/05/04 22:14:23.038 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] [2024/05/04 22:14:23.052 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:14:23.054 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [2024/05/04 22:14:23.210 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/05/04 22:14:23.242 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] + set +x + tso='449532935974158338 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532935974158338 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449532935974158338 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test [Sat May 4 22:14:23 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1216912171.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 22:14:23.389 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/05/04 22:14:23.391 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/05/04 22:14:23.427 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/05/04 22:14:23.429 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] check diff failed 3-th time, retry later [2024/05/04 22:14:23.564 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/05/04 22:14:23.612 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] [2024/05/04 22:14:23.746 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/05/04 22:14:23.748 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] [2024/05/04 22:14:23.795 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/05/04 22:14:23.797 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/05/04 22:14:23.924 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] [2024/05/04 22:14:23.975 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] check diff failed 4-th time, retry later table mark.finish_mark_3 not exists for 7-th check, retry later [2024/05/04 22:14:24.105 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/05/04 22:14:24.107 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/05/04 22:14:24.170 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/05/04 22:14:24.172 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] [2024/05/04 22:14:24.282 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/05/04 22:14:24.356 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/05/04 22:14:24.456 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/05/04 22:14:24.458 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] [2024/05/04 22:14:24.535 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/05/04 22:14:24.537 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/05/04 22:14:24.637 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] [2024/05/04 22:14:24.721 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:24.823 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"] [2024/05/04 22:14:24.825 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"] [2024/05/04 22:14:24.896 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"] [2024/05/04 22:14:24.898 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"] [2024/05/04 22:14:24.995 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/move_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
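The probe traced above (and seen completing a few lines below) waits for the cdc server to come up: it polls /debug/info with the ticdc:ticdc_secret basic-auth credentials, treats "failed to get info:" as not ready, and breaks once "etcd info" appears in the response, giving up after 50 attempts with a 3-second sleep between them. A condensed sketch of that loop, based directly on the trace; the loop-variable name and failure message are assumptions.

# Readiness loop condensed from the trace above.
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret) || res=""
    if echo "$res" | grep -q 'failed to get info:'; then
        : # server answered but is not serving capture info yet
    elif echo "$res" | grep -q 'etcd info'; then
        break # owner/etcd metadata is visible, so the server is ready
    fi
    if [[ $i -eq 50 ]]; then
        echo "cdc server failed to become ready"
        exit 1
    fi
    sleep 3
done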
table test.finish_mark not exists for 5-th check, retry later table multi_rocks.finish_mark not exists for 2-th check, retry later [2024/05/04 22:14:25.077 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"] [2024/05/04 22:14:25.197 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"] [2024/05/04 22:14:25.199 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"] [2024/05/04 22:14:25.267 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"] [2024/05/04 22:14:25.269 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"] check diff successfully check diff failed 1-th time, retry later [2024/05/04 22:14:25.378 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"] [2024/05/04 22:14:25.444 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"] [2024/05/04 22:14:25.555 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"] [2024/05/04 22:14:25.557 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"] [2024/05/04 22:14:25.622 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"] [2024/05/04 22:14:25.623 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"] [2024/05/04 22:14:25.732 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"] [2024/05/04 22:14:25.799 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"] check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 8-th check, retry later [2024/05/04 22:14:25.915 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"] [2024/05/04 22:14:25.917 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"] [2024/05/04 22:14:25.979 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"] [2024/05/04 22:14:25.981 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"] [2024/05/04 22:14:26.089 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"] [2024/05/04 22:14:26.152 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:26 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de9dd12b-a489-4eb8-9680-17d335ffb88a {"id":"de9dd12b-a489-4eb8-9680-17d335ffb88a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832063} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f47b41d2 de9dd12b-a489-4eb8-9680-17d335ffb88a /tidb/cdc/default/default/upstream/7365147560213462438 {"id":7365147560213462438,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de9dd12b-a489-4eb8-9680-17d335ffb88a {"id":"de9dd12b-a489-4eb8-9680-17d335ffb88a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832063} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f47b41d2 de9dd12b-a489-4eb8-9680-17d335ffb88a /tidb/cdc/default/default/upstream/7365147560213462438 {"id":7365147560213462438,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de9dd12b-a489-4eb8-9680-17d335ffb88a {"id":"de9dd12b-a489-4eb8-9680-17d335ffb88a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832063} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f47b41d2 de9dd12b-a489-4eb8-9680-17d335ffb88a /tidb/cdc/default/default/upstream/7365147560213462438 {"id":7365147560213462438,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed-redo.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449532935974158338 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12218.out cli changefeed create --start-ts=449532935974158338 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml Create changefeed successfully! ID: test-1 Info: {"upstream_id":7365147560213462438,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-04T22:14:26.800345977+08:00","start_ts":449532935974158338,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532935974158338,"checkpoint_ts":449532935974158338,"checkpoint_time":"2024-05-04 22:14:21.669"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 6-th check, retry later table multi_rocks.finish_mark not exists for 3-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd314d80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:10078, start at 2024-05-04 22:14:25.859821139 +0800 CST m=+5.156021157 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:25.866 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:25.846 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:25.846 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd314d80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:10078, start at 2024-05-04 22:14:25.859821139 +0800 CST m=+5.156021157 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:25.866 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:25.846 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:25.846 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd3165c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-r9cq4-jzgfv, pid:10168, start at 2024-05-04 22:14:25.969876353 +0800 CST m=+5.209451925 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:25.976 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:25.943 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:25.943 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff successfully check diff successfully wait process cdc.test exit for 1-th time... 
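The "wait process cdc.test exit for N-th time..." lines that begin here belong to the shutdown step of a finished case: the test kills cdc.test and then polls until no such process remains ("cdc.test: no process found" is killall's own message once nothing is left to signal). A rough sketch of that wait, with the kill command, helper name, and sleep interval as assumptions.

# Rough sketch of the shutdown wait; the exact helper in the test utilities may differ.
stop_proc() {
    local proc=$1 i=0
    killall "$proc" 2>/dev/null || true
    while pgrep -x "$proc" >/dev/null; do
        i=$((i + 1))
        echo "wait process $proc exit for $i-th time..."
        sleep 1
    done
    echo "process $proc already exit"
}
# stop_proc cdc.test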
+ set +x ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 1492 0 --:--:-- --:--:-- --:--:-- 1493 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-04 22:14:21.669","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-04 22:14:28.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:14:21.669","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:14:28.000","info":"Data' syncing is 'finished"}' ++ jq .synced table mark.finish_mark_3 not exists for 9-th check, retry later check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2190 0 --:--:-- --:--:-- --:--:-- 2198 + info='{"state":"warning","resolved_ts":449532937407561738,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532937407561738,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532937407561738,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532937407561738,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 4-th time, retry later + status=true ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:14:21.669","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:14:28.000","info":"Data' syncing is 'finished"}' ++ jq -r .sink_checkpoint_ts + sink_checkpoint_ts='2024-05-04 22:14:21.669' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:14:21.669","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:14:28.000","info":"Data' syncing is 'finished"}' ++ jq -r .puller_resolved_ts + puller_resolved_ts='1970-01-01 08:00:00.000' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:14:21.669","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:14:28.000","info":"Data' syncing is 'finished"}' ++ jq -r .last_synced_ts + last_synced_ts='1970-01-01 08:00:00.000' + '[' true '!=' true ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' ++ date '+%Y-%m-%d %H:%M:%S' + current='2024-05-04 22:14:28' + echo 'sink_checkpoint_ts is 2024-05-04' 22:14:21.669 sink_checkpoint_ts is 2024-05-04 22:14:21.669 ++ date -d '2024-05-04 22:14:21.669' +%s + checkpoint_timestamp=1714832061 ++ date -d '2024-05-04 22:14:28' +%s + current_timestamp=1714832068 + '[' 7 -gt 300 ']' + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 7-th check, retry later table multi_rocks.finish_mark exists check diff successfully + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11598.out cli tso query --pd=http://127.0.0.1:2379 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:29 CST 2024] <<<<<< run test case capture_session_done_during_task success! >>>>>> wait process cdc.test exit for 1-th time... check diff failed 1-th time, retry later wait process cdc.test exit for 2-th time... table mark.finish_mark_3 not exists for 10-th check, retry later wait process cdc.test exit for 3-th time... table test.finish_mark not exists for 8-th check, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:14:30 CST 2024] <<<<<< run test case multi_rocks success! >>>>>> + set +x + tso='449532937986637825 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532937986637825 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 22:14:30 CST 2024] <<<<<< START cdc server in partition_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + GO_FAILPOINTS= + etcd_info_msg='etcd info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1164711649.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.t1 exists + sleep 5 [2024/05/04 22:14:31.480 +08:00] [INFO] [main.go:86] ["running ddl test: 4 modifyColumnDDL"] [2024/05/04 22:14:31.671 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:14:31.681 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/05/04 22:14:31.831 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:14:31.833 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/05/04 22:14:31.854 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:14:31.856 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:14:32.022 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:14:32.040 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] table mark.finish_mark_3 not exists for 11-th check, retry later check diff failed 2-th time, retry later [2024/05/04 22:14:32.197 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:14:32.199 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:14:32.207 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:14:32.209 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_sequence/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
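The start timestamp for the partition_table changefeed is taken from PD just above: "cdc cli tso query" prints the TSO followed by the coverage footer of the instrumented binary, and awk keeps only the first field. That value is then passed as --start-ts when the changefeed is created further below. A minimal sketch of the sequence; the Kafka topic is shown as a placeholder rather than the randomized topic name used in this run.

# Sketch of the tso-query / changefeed-create sequence seen in the trace.
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
# Unquoted echo collapses the coverage footer onto one line; the first field is the TSO.
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')
cdc cli changefeed create \
  --start-ts="$start_ts" \
  --sink-uri="kafka://127.0.0.1:9092/ticdc-partition-table-test?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"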
[2024/05/04 22:14:32.378 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/05/04 22:14:32.385 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/05/04 22:14:32.550 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/05/04 22:14:32.552 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/05/04 22:14:32.557 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:14:32.559 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] table test.finish_mark exists check diff successfully [2024/05/04 22:14:32.727 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/05/04 22:14:32.734 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/05/04 22:14:32.901 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:14:32.903 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/05/04 22:14:32.909 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/05/04 22:14:32.911 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/05/04 22:14:33.064 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [2024/05/04 22:14:33.082 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] start tidb cluster in /tmp/tidb_cdc_test/move_table Starting Upstream PD... wait process cdc.test exit for 1-th time... [2024/05/04 22:14:33.226 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:14:33.228 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [2024/05/04 22:14:33.249 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/05/04 22:14:33.250 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [2024/05/04 22:14:33.390 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [2024/05/04 22:14:33.410 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/05/04 22:14:33.560 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/05/04 22:14:33.562 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [2024/05/04 22:14:33.586 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/05/04 22:14:33.588 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/05/04 22:14:33.738 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] [2024/05/04 22:14:33.764 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] wait process cdc.test exit for 2-th time... check diff failed 3-th time, retry later table mark.finish_mark_3 not exists for 12-th check, retry later [2024/05/04 22:14:33.911 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/05/04 22:14:33.912 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/05/04 22:14:33.936 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/05/04 22:14:33.938 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] [2024/05/04 22:14:34.080 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/05/04 22:14:34.098 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:33 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e96f503-b743-45fe-88be-dba712d676a3 {"id":"2e96f503-b743-45fe-88be-dba712d676a3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832071} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f49acece 2e96f503-b743-45fe-88be-dba712d676a3 /tidb/cdc/default/default/upstream/7365147605422010419 {"id":7365147605422010419,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e96f503-b743-45fe-88be-dba712d676a3 {"id":"2e96f503-b743-45fe-88be-dba712d676a3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832071} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f49acece 2e96f503-b743-45fe-88be-dba712d676a3 /tidb/cdc/default/default/upstream/7365147605422010419 {"id":7365147605422010419,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e96f503-b743-45fe-88be-dba712d676a3 {"id":"2e96f503-b743-45fe-88be-dba712d676a3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832071} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f49acece 2e96f503-b743-45fe-88be-dba712d676a3 /tidb/cdc/default/default/upstream/7365147605422010419 {"id":7365147605422010419,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11704.out cli changefeed create --start-ts=449532937986637825 '--sink-uri=kafka://127.0.0.1:9092/ticdc-partition-table-test-2302?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [2024/05/04 22:14:34.254 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/05/04 22:14:34.256 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] [2024/05/04 22:14:34.268 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/05/04 22:14:34.270 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] wait process cdc.test exit 
for 3-th time... Create changefeed successfully! ID: 052b470f-ec56-45a8-8feb-ac1b03e2169a Info: {"upstream_id":7365147605422010419,"namespace":"default","id":"052b470f-ec56-45a8-8feb-ac1b03e2169a","sink_uri":"kafka://127.0.0.1:9092/ticdc-partition-table-test-2302?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:14:34.366497792+08:00","start_ts":449532937986637825,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532937986637825,"checkpoint_ts":449532937986637825,"checkpoint_time":"2024-05-04 22:14:29.346"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:14:34.425 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/05/04 22:14:34.439 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/05/04 22:14:34.594 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/05/04 22:14:34.596 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/05/04 22:14:34.611 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/05/04 22:14:34.613 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] [2024/05/04 22:14:34.761 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] [2024/05/04 22:14:34.778 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:14:34 CST 2024] <<<<<< run test case kafka_simple_basic success! 
>>>>>> [2024/05/04 22:14:34.934 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"] [2024/05/04 22:14:34.936 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"] [2024/05/04 22:14:34.949 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"] [2024/05/04 22:14:34.951 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"] [2024/05/04 22:14:35.104 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"] [2024/05/04 22:14:35.120 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 22:14:35.277 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"] [2024/05/04 22:14:35.279 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"] [2024/05/04 22:14:35.295 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"] [2024/05/04 22:14:35.297 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"] [2024/05/04 22:14:35.453 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"] [2024/05/04 22:14:35.470 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"] [2024/05/04 22:14:35.633 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"] [2024/05/04 22:14:35.635 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"] [2024/05/04 22:14:35.650 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"] [2024/05/04 22:14:35.652 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"] [2024/05/04 22:14:35.813 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"] [2024/05/04 22:14:35.825 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"] + set +x [Sat May 4 22:14:35 CST 2024] <<<<<< START kafka consumer in partition_table case >>>>>> check diff failed 4-th time, retry later table mark.finish_mark_3 not exists for 13-th check, retry later [2024/05/04 22:14:35.992 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"] [2024/05/04 22:14:35.994 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"] [2024/05/04 22:14:36.002 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"] [2024/05/04 22:14:36.004 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"] [2024/05/04 22:14:36.160 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"] [2024/05/04 22:14:36.173 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"] [2024/05/04 22:14:36.335 +08:00] [INFO] [main.go:220] ["0 insert success: 2800"] [2024/05/04 22:14:36.337 +08:00] [INFO] [main.go:234] ["0 delete success: 1400"] [2024/05/04 22:14:36.348 +08:00] [INFO] [main.go:220] ["1 insert success: 2800"] [2024/05/04 22:14:36.350 +08:00] [INFO] [main.go:234] ["1 delete success: 1400"] ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload 
Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 3360 0 --:--:-- --:--:-- --:--:-- 3375 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-04 22:14:34.069","puller_resolved_ts":"2024-05-04 22:14:28.569","last_synced_ts":"2024-05-04 22:14:28.668","now_ts":"2024-05-04 22:14:35.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:14:34.069","puller_resolved_ts":"2024-05-04' '22:14:28.569","last_synced_ts":"2024-05-04' '22:14:28.668","now_ts":"2024-05-04' '22:14:35.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:14:34.069","puller_resolved_ts":"2024-05-04' '22:14:28.569","last_synced_ts":"2024-05-04' '22:14:28.668","now_ts":"2024-05-04' '22:14:35.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 start tidb cluster in /tmp/tidb_cdc_test/ddl_sequence Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2189 0 --:--:-- --:--:-- --:--:-- 2198 + info='{"state":"warning","resolved_ts":449532939504713740,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449532939504713740,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449532939504713740,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449532939504713740,"checkpoint_ts":449532932348182562,"last_warning":{"time":"2024-05-04T22:14:10.787196368+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 5-th time, retry later Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 14-th check, retry later Verifying downstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully table mark.finish_mark_3 not exists for 15-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_attributes/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:14:41.622 +08:00] [INFO] [main.go:86] ["running ddl test: 5 addDropIndexDDL"] wait process cdc.test exit for 3-th time... [2024/05/04 22:14:41.808 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/05/04 22:14:41.815 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/05/04 22:14:41.979 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/05/04 22:14:41.980 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/05/04 22:14:41.982 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/05/04 22:14:41.983 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/05/04 22:14:42.130 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/05/04 22:14:42.137 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:14:42 CST 2024] <<<<<< run test case changefeed_pause_resume success! >>>>>> table mark.finish_mark_3 not exists for 16-th check, retry later [2024/05/04 22:14:42.336 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/05/04 22:14:42.338 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/05/04 22:14:42.345 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [2024/05/04 22:14:42.346 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/05/04 22:14:42.550 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/05/04 22:14:42.553 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/05/04 22:14:42.757 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/05/04 22:14:42.758 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/05/04 22:14:42.774 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/05/04 22:14:42.776 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] \033[0;36m<<< Run all test success >>>\033[0m [2024/05/04 22:14:42.997 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/05/04 22:14:43.007 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd419f00012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:23413, start at 2024-05-04 22:14:42.586779603 +0800 CST m=+5.290082996 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:42.593 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:42.556 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:42.556 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // dir [2024/05/04 22:14:43.242 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/05/04 22:14:43.244 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/05/04 22:14:43.254 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/05/04 22:14:43.256 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [2024/05/04 22:14:43.490 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/05/04 22:14:43.516 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv start tidb cluster in /tmp/tidb_cdc_test/ddl_attributes Starting Upstream PD... [Pipeline] } [2024/05/04 22:14:43.757 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] [2024/05/04 22:14:43.759 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/05/04 22:14:43.807 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/05/04 22:14:43.810 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [Pipeline] // node ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } table partition_table.t exists table partition_table.t1 exists table partition_table.t2 not exists for 1-th check, retry later Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] // podTemplate table mark.finish_mark_3 not exists for 17-th check, retry later [2024/05/04 22:14:44.059 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/05/04 22:14:44.106 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [2024/05/04 22:14:44.345 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/05/04 22:14:44.346 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/05/04 22:14:44.379 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/05/04 22:14:44.381 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [Pipeline] } [2024/05/04 22:14:44.545 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/05/04 22:14:44.599 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] [2024/05/04 22:14:44.819 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] [2024/05/04 22:14:44.820 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/05/04 22:14:44.855 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/05/04 22:14:44.860 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/05/04 22:14:45.132 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] [2024/05/04 22:14:45.163 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/05/04 22:14:45.458 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/05/04 22:14:45.459 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/05/04 22:14:45.491 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/05/04 22:14:45.494 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd419f00012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:23413, start at 2024-05-04 22:14:42.586779603 +0800 CST m=+5.290082996 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:42.593 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:42.556 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:42.556 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd41aa40008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:23486, start at 2024-05-04 22:14:42.60958984 +0800 CST m=+5.257256065 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:42.615 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:42.601 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:42.601 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
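The check_changefeed_status calls in this log (the warning-state failure a few lines above, and the normal-state success a little further down) reduce to polling the TiCDC v2 status API and comparing the state field with jq. A minimal sketch of that pattern in shell; the function name and retry budget below are illustrative rather than the repository's exact helper:

# Poll /api/v2/changefeeds/<id>/status and assert the state, as the
# check_changefeed_status traces in this log do. The endpoint and changefeed
# id come from the log; everything else here is a sketch.
check_state() {
  local endpoint=$1 changefeed_id=$2 expected=$3
  local info state
  info=$(curl -s "http://${endpoint}/api/v2/changefeeds/${changefeed_id}/status")
  state=$(echo "$info" | jq -r .state)
  if [[ "$state" != "$expected" ]]; then
    echo "changefeed state $state does not equal to $expected"
    return 1
  fi
  return 0
}

# Retry a few times before giving up, as the surrounding run_task loop does.
for i in $(seq 1 6); do
  check_state 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal && break
  echo "run task failed $i-th time, retry later"
  sleep 2
done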
Logging trace to /tmp/tidb_cdc_test/move_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/move_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/move_table/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 22:14:45.803 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/05/04 22:14:45.822 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/05/04 22:14:46.157 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] [2024/05/04 22:14:46.158 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/05/04 22:14:46.175 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/05/04 22:14:46.179 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/05/04 22:14:46.538 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] [2024/05/04 22:14:46.560 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.t2 not exists for 2-th check, retry later table mark.finish_mark_3 not exists for 18-th check, retry later check_changefeed_status 127.0.0.1:8300 7d1a8586-68af-4913-9f72-854aa7238def normal + endpoint=127.0.0.1:8300 + changefeed_id=7d1a8586-68af-4913-9f72-854aa7238def + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/7d1a8586-68af-4913-9f72-854aa7238def/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 770 0 --:--:-- --:--:-- --:--:-- 767 100 86 100 86 0 0 769 0 --:--:-- --:--:-- --:--:-- 767 + info='{"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931}' + echo '{"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931}' {"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931} ++ echo '{"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] + [[ -z '' ]] ++ echo '{"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931}' ++ jq -r .last_error + error_msg=null + [[ ! null == \n\u\l\l ]] ++ echo '{"state":"normal","resolved_ts":449532942139260931,"checkpoint_ts":449532942139260931}' ++ jq -r .last_warning + error_msg=null + [[ ! 
null == \n\u\l\l ]] + exit 0 run task successfully table kafka_sink_error_resume.t1 exists table kafka_sink_error_resume.t2 exists check diff successfully check diff failed 1-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.24882.out cli tso query --pd=http://127.0.0.1:2379 table partition_table.t2 not exists for 3-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_3 not exists for 19-th check, retry later check diff successfully Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 1-th time... start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic_avro Starting Upstream PD... + set +x + tso='449532942865137666 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532942865137666 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "recordcount"="10000" "insertproportion"="0" "updateproportion"="0" "mysql.port"="4000" "mysql.db"="move_table" "mysql.user"="root" "readallfields"="true" "requestdistribution"="uniform" "workload"="core" "mysql.host"="127.0.0.1" "scanproportion"="0" "readproportion"="0" "dotransactions"="false" "threadcount"="10" "operationcount"="0" ********************************************** Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... table partition_table.t2 not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0fd4656c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:8265, start at 2024-05-04 22:14:47.41652579 +0800 CST m=+5.425083801 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:47.422 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:47.387 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:47.387 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd4656c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:8265, start at 2024-05-04 22:14:47.41652579 +0800 CST m=+5.425083801 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:47.422 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:47.387 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:47.387 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd467a40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:8352, start at 2024-05-04 22:14:47.565313018 +0800 CST m=+5.527012982 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:47.571 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:47.529 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:47.529 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table mark.finish_mark_3 not exists for 20-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:14:50 CST 2024] <<<<<< run test case kafka_sink_error_resume success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
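The "Verifying Upstream TiDB is started..." step, and the ERROR 2003 lines that follow it, are the usual retry-until-ready pattern: the MySQL protocol port is probed until TiDB finishes bootstrapping. A minimal sketch, assuming the helper simply retries a trivial query (the host, port, and retry budget are illustrative):

# Retry a trivial query until TiDB accepts connections; ERROR 2003 while the
# server is still starting is expected and harmless here.
wait_for_tidb() {
  local host=${1:-127.0.0.1} port=${2:-4000}
  local i
  for i in $(seq 1 60); do
    if mysql -h "$host" -P "$port" -u root -e 'SELECT 1' >/dev/null 2>&1; then
      return 0
    fi
    sleep 1
  done
  echo "TiDB at $host:$port did not become ready" >&2
  return 1
}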
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Run finished, takes 1.262107065s INSERT - Takes(s): 1.3, Count: 10000, OPS: 7950.0, Avg(us): 1219, Min(us): 801, Max(us): 6026, 95th(us): 2000, 99th(us): 3000 [Sat May 4 22:14:50 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2496924971.out server --log-file /tmp/tidb_cdc_test/move_table/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data1 --cluster-id default --addr 127.0.0.1:8300 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Verifying downstream PD is started... table partition_table.t2 exists table partition_table.finish_mark not exists for 1-th check, retry later table mark.finish_mark_3 not exists for 21-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9683.out cli tso query --pd=http://127.0.0.1:2379 [2024/05/04 22:14:52.335 +08:00] [INFO] [main.go:78] ["runDDLTest take 1m1.542469752s"] table mark.finish_mark_0 exists table mark.finish_mark_1 not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_3 not exists for 22-th check, retry later + set +x + tso='449532944092233730 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532944092233730 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:14:54 CST 2024] <<<<<< START cdc server in ddl_sequence case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.97209722.out server --log-file /tmp/tidb_cdc_test/ddl_sequence/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_sequence/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:54 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 
{"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25027.out cli changefeed create --start-ts=449532942865137666 '--sink-uri=kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 38b0fb8f-762d-49a4-8286-cba15ceb1451 Info: {"upstream_id":7365147676054611453,"namespace":"default","id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink_uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:14:54.473873401+08:00","start_ts":449532942865137666,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532942865137666,"checkpoint_ts":449532942865137666,"checkpoint_time":"2024-05-04 22:14:47.956"} PASS table partition_table.finish_mark not exists for 2-th check, retry later coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
table mark.finish_mark_1 not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 22:14:55 CST 2024] <<<<<< START kafka consumer in move_table case >>>>>> [Sat May 4 22:14:55 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2506525067.out server --log-file /tmp/tidb_cdc_test/move_table/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data2 --cluster-id default --addr 127.0.0.1:8301 ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.finish_mark not exists for 3-th check, retry later table mark.finish_mark_3 not exists for 23-th check, retry later table mark.finish_mark_1 not exists for 3-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd4e81c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:24525, start at 2024-05-04 22:14:55.768908236 +0800 CST m=+5.181075470 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:55.776 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:55.751 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:55.751 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd4e81c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:24525, start at 2024-05-04 22:14:55.768908236 +0800 CST m=+5.181075470 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:55.776 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:55.751 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:55.751 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd4eb380014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-v3qw7-1d792, pid:24611, start at 2024-05-04 22:14:55.982913618 +0800 CST m=+5.329375744 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:16:55.990 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:14:55.950 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:04:55.950 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
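The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tidb_server_version, the tikv_gc_* rows, and so on) have the shape of TiDB's mysql.tidb system table, flattened by the log. One way to reproduce such a dump by hand; the host and port are assumptions chosen to match the usual upstream TiDB address in these tests:

# Print TiDB's bootstrap and GC bookkeeping, which is what the flattened
# VARIABLE_NAME/VARIABLE_VALUE/COMMENT blocks in this log appear to be.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'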
Logging trace to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_with_auth/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:57 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9a770555-15e1-4e01-9659-11d9d0a136f4 {"id":"9a770555-15e1-4e01-9659-11d9d0a136f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4ed32d5 9a770555-15e1-4e01-9659-11d9d0a136f4 /tidb/cdc/default/default/upstream/7365147690076115653 {"id":7365147690076115653,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9a770555-15e1-4e01-9659-11d9d0a136f4 {"id":"9a770555-15e1-4e01-9659-11d9d0a136f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4ed32d5 9a770555-15e1-4e01-9659-11d9d0a136f4 
/tidb/cdc/default/default/upstream/7365147690076115653 {"id":7365147690076115653,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9a770555-15e1-4e01-9659-11d9d0a136f4 {"id":"9a770555-15e1-4e01-9659-11d9d0a136f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4ed32d5 9a770555-15e1-4e01-9659-11d9d0a136f4 /tidb/cdc/default/default/upstream/7365147690076115653 {"id":7365147690076115653,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9782.out cli changefeed create --start-ts=449532944092233730 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-12435?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 04ffe7ed-e347-46a5-92af-2c2eadc797b3 Info: {"upstream_id":7365147690076115653,"namespace":"default","id":"04ffe7ed-e347-46a5-92af-2c2eadc797b3","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-12435?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:14:57.709460097+08:00","start_ts":449532944092233730,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532944092233730,"checkpoint_ts":449532944092233730,"checkpoint_time":"2024-05-04 22:14:52.637"} PASS coverage: 2.4% of statements in 
github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.finish_mark not exists for 4-th check, retry later table mark.finish_mark_3 not exists for 24-th check, retry later table mark.finish_mark_1 not exists for 4-th check, retry later + set +x [Sat May 4 22:14:59 CST 2024] <<<<<< START kafka consumer in ddl_sequence case >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:14:59 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001911680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} {CheckpointTs:449532943323889912 MinTableBarrierTs:449532945591435269 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 
{"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532943323889912,"min-table-barrier-ts":449532945591435269,"admin-job-type":0} /tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001911680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} 
{CheckpointTs:449532943323889912 MinTableBarrierTs:449532945591435269 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532943323889912,"min-table-barrier-ts":449532945591435269,"admin-job-type":0} 
/tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001911680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} {CheckpointTs:449532943323889912 MinTableBarrierTs:449532945591435269 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 
{"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532943323889912,"min-table-barrier-ts":449532945591435269,"admin-job-type":0} /tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 22:14:59 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2517625178.out server --log-file /tmp/tidb_cdc_test/move_table/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data3 --cluster-id default --addr 
127.0.0.1:8302 + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.26004.out cli tso query --pd=http://127.0.0.1:2379 table mark.finish_mark_3 not exists for 25-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449532945812946945 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532945812946945 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:15:00 CST 2024] <<<<<< START cdc server in ddl_attributes case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.2604226044.out server --log-file /tmp/tidb_cdc_test/ddl_attributes/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_attributes/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/cli_with_auth Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table mark.finish_mark_1 not exists for 5-th check, retry later table partition_table.finish_mark not exists for 5-th check, retry later table mark.finish_mark_3 not exists for 26-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd53ad80012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:8754, start at 2024-05-04 22:15:01.077826349 +0800 CST m=+5.210710131 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:01.085 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:01.046 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:01.046 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd53ad80012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:8754, start at 2024-05-04 22:15:01.077826349 +0800 CST m=+5.210710131 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:01.085 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:01.046 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:01.046 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd53d100014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:8839, start at 2024-05-04 22:15:01.210785883 +0800 CST m=+5.288037925 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:01.217 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:01.188 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:01.188 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
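The shell traces interleaved above and below (curl -vsL --max-time 20 http://127.0.0.1:830x/debug/info --user ticdc:ticdc_secret, then grep for 'failed to get info:' and 'etcd info', sleep 3, up to 50 attempts) are the readiness loop run after each cdc server start. A condensed sketch of that loop, with the function name and failure message added for illustration:

wait_cdc_ready() {
    # Poll the CDC server's /debug/info endpoint until the dump contains
    # "etcd info", i.e. the capture has registered itself in etcd.
    local addr=$1
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret)
        if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
            return 0
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server at ${addr} did not become ready"
            return 1
        fi
        sleep 3
    done
}

wait_cdc_ready 127.0.0.1:8300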
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:02 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0014be750 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} {CheckpointTs:449532946377867272 MinTableBarrierTs:449532946377867272 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/capture/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 {"id":"bcd0ce48-6b0f-4dc8-b942-b80a09b84c36","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f179 bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 
{"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532946377867272,"min-table-barrier-ts":449532946377867272,"admin-job-type":0} /tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 
AdminJobType:noop Engine:unified SortDir: Config:0xc0014be750 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} {CheckpointTs:449532946377867272 MinTableBarrierTs:449532946377867272 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/capture/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 {"id":"bcd0ce48-6b0f-4dc8-b942-b80a09b84c36","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f179 bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 
{"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532946377867272,"min-table-barrier-ts":449532946377867272,"admin-job-type":0} /tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + grep -q 'etcd info' + echo ' *** processors info ***: changefeedID: default/38b0fb8f-762d-49a4-8286-cba15ceb1451 {UpstreamID:7365147676054611453 Namespace:default ID:38b0fb8f-762d-49a4-8286-cba15ceb1451 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 
22:14:54.473873401 +0800 CST StartTs:449532942865137666 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0014be750 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-52-g6a342866d Epoch:449532944569073667} {CheckpointTs:449532946377867272 MinTableBarrierTs:449532946377867272 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3740de0c-97f2-4404-b8ea-d225116b9af3 {"id":"3740de0c-97f2-4404-b8ea-d225116b9af3","address":"127.0.0.1:8301","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832096} /tidb/cdc/default/__cdc_meta__/capture/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 {"id":"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832091} /tidb/cdc/default/__cdc_meta__/capture/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 {"id":"bcd0ce48-6b0f-4dc8-b942-b80a09b84c36","address":"127.0.0.1:8302","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f0f0 75d96dae-6a9f-4ac8-8d87-b7e27a06aa94 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f145 3740de0c-97f2-4404-b8ea-d225116b9af3 /tidb/cdc/default/__cdc_meta__/owner/22318f43f4e1f179 bcd0ce48-6b0f-4dc8-b942-b80a09b84c36 /tidb/cdc/default/default/changefeed/info/38b0fb8f-762d-49a4-8286-cba15ceb1451 
{"upstream-id":7365147676054611453,"namespace":"default","changefeed-id":"38b0fb8f-762d-49a4-8286-cba15ceb1451","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-12796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T22:14:54.473873401+08:00","start-ts":449532942865137666,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-52-g6a342866d","epoch":449532944569073667} /tidb/cdc/default/default/changefeed/status/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":449532946377867272,"min-table-barrier-ts":449532946377867272,"admin-job-type":0} /tidb/cdc/default/default/task/position/3740de0c-97f2-4404-b8ea-d225116b9af3/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/75d96dae-6a9f-4ac8-8d87-b7e27a06aa94/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/bcd0ce48-6b0f-4dc8-b942-b80a09b84c36/38b0fb8f-762d-49a4-8286-cba15ceb1451 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365147676054611453 {"id":7365147676054611453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x table move_table.usertable exists go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: 
downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/go-playground/locales v0.14.1 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading github.com/xiang90/probing 
v0.0.0-20221125231312-a49e3df8f510 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_lost_callback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:15:01 CST 2024] <<<<<< run test case mq_sink_lost_callback success! >>>>>> table partition_table.finish_mark not exists for 6-th check, retry later table mark.finish_mark_1 not exists for 6-th check, retry later Verifying downstream PD is started... go: downloading github.com/ardielle/ardielle-go v1.5.2 table mark.finish_mark_3 not exists for 27-th check, retry later [Sat May 4 22:15:04 CST 2024] <<<<<< START cdc server in kafka_simple_basic_avro case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.1015310155.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:04 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1d9008df-7bef-4ef3-8c2d-dc41d35c810d {"id":"1d9008df-7bef-4ef3-8c2d-dc41d35c810d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832101} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f50c20cf 1d9008df-7bef-4ef3-8c2d-dc41d35c810d /tidb/cdc/default/default/upstream/7365147735915213362 {"id":7365147735915213362,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1d9008df-7bef-4ef3-8c2d-dc41d35c810d {"id":"1d9008df-7bef-4ef3-8c2d-dc41d35c810d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832101} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f50c20cf 1d9008df-7bef-4ef3-8c2d-dc41d35c810d /tidb/cdc/default/default/upstream/7365147735915213362 {"id":7365147735915213362,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1d9008df-7bef-4ef3-8c2d-dc41d35c810d {"id":"1d9008df-7bef-4ef3-8c2d-dc41d35c810d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832101} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f50c20cf 1d9008df-7bef-4ef3-8c2d-dc41d35c810d /tidb/cdc/default/default/upstream/7365147735915213362 {"id":7365147735915213362,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.26106.out cli changefeed create --start-ts=449532945812946945 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-30600?protocol=open-protocol&partition-num=4&kafka-version=2.4.1' Create changefeed successfully! 
ID: fcbb14df-50a7-42c7-bd0b-528b124fb952 Info: {"upstream_id":7365147735915213362,"namespace":"default","id":"fcbb14df-50a7-42c7-bd0b-528b124fb952","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-30600?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1","create_time":"2024-05-04T22:15:04.882715038+08:00","start_ts":449532945812946945,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532945812946945,"checkpoint_ts":449532945812946945,"checkpoint_time":"2024-05-04 22:14:59.201"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table mark.finish_mark_1 not exists for 7-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table partition_table.finish_mark not exists for 7-th check, retry later table ddl_sequence.finish_mark not exists for 1-th check, retry later + set +x [Sat May 4 22:15:06 CST 2024] <<<<<< START kafka consumer in ddl_attributes case >>>>>> table mark.finish_mark_3 not exists for 28-th check, retry later table mark.finish_mark_1 not exists for 8-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
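The changefeed-creation steps traced in this log follow one pattern: query a start TSO from PD with the cdc CLI, then create a Kafka changefeed whose sink URI carries the protocol, partition count, Kafka version and max message size. A minimal sketch distilled from those traces; the topic name is a placeholder and the plain cdc binary stands in for the coverage-instrumented cdc.test used here:

pd_addr="http://127.0.0.1:2379"

# The first word of the CLI output is the TSO, as in the `awk '{print $1}'` trace above.
start_ts=$(cdc cli tso query --pd="$pd_addr" | head -n1 | awk '{print $1}')

# Create the changefeed against the local Kafka broker, mirroring the flags in the log.
cdc cli changefeed create \
    --start-ts="$start_ts" \
    --sink-uri="kafka://127.0.0.1:9092/example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"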
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.finish_mark exists check diff successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:07 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cfdec7c2-72ca-4cdf-8271-8325fd0cf066 {"id":"cfdec7c2-72ca-4cdf-8271-8325fd0cf066","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832104} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5228ecc cfdec7c2-72ca-4cdf-8271-8325fd0cf066 /tidb/cdc/default/default/upstream/7365147746290740676 {"id":7365147746290740676,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cfdec7c2-72ca-4cdf-8271-8325fd0cf066 {"id":"cfdec7c2-72ca-4cdf-8271-8325fd0cf066","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832104} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5228ecc cfdec7c2-72ca-4cdf-8271-8325fd0cf066 /tidb/cdc/default/default/upstream/7365147746290740676 {"id":7365147746290740676,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/cfdec7c2-72ca-4cdf-8271-8325fd0cf066 {"id":"cfdec7c2-72ca-4cdf-8271-8325fd0cf066","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832104} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5228ecc cfdec7c2-72ca-4cdf-8271-8325fd0cf066 /tidb/cdc/default/default/upstream/7365147746290740676 {"id":7365147746290740676,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.cli.10214.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-avro-9604?protocol=simple&encoding-format=avro' 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/conf/changefeed.toml wait process cdc.test exit for 1-th time... table ddl_sequence.finish_mark not exists for 2-th check, retry later Create changefeed successfully! ID: 118e54a2-3456-468f-b142-b4dc91a07095 Info: {"upstream_id":7365147746290740676,"namespace":"default","id":"118e54a2-3456-468f-b142-b4dc91a07095","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-avro-9604?protocol=simple\u0026encoding-format=avro","create_time":"2024-05-04T22:15:07.758815903+08:00","start_ts":449532948013645830,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532948013645830,"checkpoint_ts":449532948013645830,"checkpoint_time":"2024-05-04 22:15:07.596"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 2-th time... start tidb cluster in /tmp/tidb_cdc_test/mq_sink_dispatcher Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table mark.finish_mark_3 not exists for 29-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:15:08 CST 2024] <<<<<< run test case partition_table success! 
>>>>>> table mark.finish_mark_1 not exists for 9-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x table ddl_sequence.finish_mark not exists for 3-th check, retry later table mark.finish_mark_3 not exists for 30-th check, retry later table mark.finish_mark_1 not exists for 10-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_sequence.finish_mark not exists for 4-th check, retry later table mark.finish_mark_3 not exists for 31-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_attributes.attributes_t1_new not exists for 1-th check, retry later table mark.finish_mark_1 not exists for 11-th check, retry later table ddl_sequence.finish_mark exists VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd5e8440011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:11347, start at 2024-05-04 22:15:12.166312539 +0800 CST m=+5.451208112 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:12.173 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:12.145 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:12.145 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully table ddl_attributes.attributes_t1_new not exists for 2-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore wait process cdc.test exit for 1-th time... table mark.finish_mark_3 not exists for 32-th check, retry later Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 22:15:14 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic_avro case >>>>>> wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:15:15 CST 2024] <<<<<< run test case ddl_sequence success! >>>>>> table mark.finish_mark_1 exists table mark.finish_mark_2 not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd5e8440011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:11347, start at 2024-05-04 22:15:12.166312539 +0800 CST m=+5.451208112 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:12.173 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:12.145 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:12.145 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd5e8ec000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:11425, start at 2024-05-04 22:15:12.204959097 +0800 CST m=+5.431950852 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:12.211 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:12.187 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:12.187 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
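The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dump above is printed once a TiDB instance starts answering queries; until then the probe only sees "ERROR 2003 (HY000): Can't connect to MySQL server". A rough sketch of such a readiness check, assuming the harness simply reads mysql.tidb (the exact statement it runs is not shown in the log):

wait_tidb_ready() {
    # Retry until TiDB accepts connections, then dump the bootstrap/GC variables.
    local host=$1 port=$2 max_retry=${3:-60}
    for ((i = 1; i <= max_retry; i++)); do
        if mysql -h "$host" -P "$port" -u root --default-character-set=utf8mb4 \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'; then
            return 0
        fi
        sleep 2
    done
    return 1
}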
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cli_with_auth/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cli_with_auth/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_attributes.attributes_t1_new exists table ddl_attributes.finish_mark not exists for 1-th check, retry later table mark.finish_mark_3 not exists for 33-th check, retry later table mark.finish_mark_2 not exists for 2-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.12770.out cli tso query --pd=http://127.0.0.1:2379 table ddl_attributes.finish_mark not exists for 2-th check, retry later table mark.finish_mark_3 not exists for 34-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:15:18.407 +08:00] [INFO] [main.go:61] ["table mover started"] [2024/05/04 22:15:18.410 +08:00] [INFO] [main.go:166] ["new cluster initialized"] [2024/05/04 22:15:18.411 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=75d96dae-6a9f-4ac8-8d87-b7e27a06aa94] [2024/05/04 22:15:18.411 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300] [2024/05/04 22:15:18.411 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"] [2024/05/04 22:15:18.607 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] 
[changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=3740de0c-97f2-4404-b8ea-d225116b9af3] [processorDetail="{\"table_ids\":[108]}"] [2024/05/04 22:15:18.807 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=75d96dae-6a9f-4ac8-8d87-b7e27a06aa94] [processorDetail="{\"table_ids\":[106]}"] table mark.finish_mark_2 not exists for 3-th check, retry later [2024/05/04 22:15:19.006 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36] [processorDetail="{\"table_ids\":[]}"] [2024/05/04 22:15:19.006 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"3740de0c-97f2-4404-b8ea-d225116b9af3\":[{\"ID\":108,\"Changefeed\":\"38b0fb8f-762d-49a4-8286-cba15ceb1451\"}],\"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94\":[{\"ID\":106,\"Changefeed\":\"38b0fb8f-762d-49a4-8286-cba15ceb1451\"}],\"bcd0ce48-6b0f-4dc8-b942-b80a09b84c36\":[]}"] [2024/05/04 22:15:19.006 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=38b0fb8f-762d-49a4-8286-cba15ceb1451&target-cp-id=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36&table-id=108"] [2024/05/04 22:15:19.056 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=108] [2024/05/04 22:15:19.056 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=3740de0c-97f2-4404-b8ea-d225116b9af3] [targetCapture=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36] table move_table.check1 exists check diff successfully table test.finish_mark not exists for 1-th check, retry later + set +x + tso='449532950739943425 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532950739943425 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:15:19 CST 2024] <<<<<< START cdc server in cli_with_auth case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.1282612828.out server --log-file /tmp/tidb_cdc_test/cli_with_auth/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cli_with_auth/cdc_data --cluster-id default --config /tmp/tidb_cdc_test/cli_with_auth/server.toml + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_attributes.finish_mark not exists for 3-th check, retry later table mark.finish_mark_3 not exists for 35-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_2 not exists for 4-th check, retry later \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } table test.finish_mark not exists for 2-th check, retry later [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage table ddl_attributes.finish_mark not exists for 4-th check, retry later [Pipeline] } table mark.finish_mark_3 not exists for 36-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd659b00009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:12419, start at 2024-05-04 22:15:19.414929014 +0800 CST m=+5.267563095 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:19.422 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:19.404 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:19.404 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd659b00009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:12419, start at 2024-05-04 22:15:19.414929014 +0800 CST m=+5.267563095 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:19.422 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:19.404 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:19.404 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd6599c0006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:12497, start at 2024-05-04 22:15:19.405412298 +0800 CST m=+5.204954846 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:19.412 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:19.399 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:19.399 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
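The curl blocks in this log form a readiness loop around the CDC HTTP API: the test polls /debug/info with HTTP basic auth (user ticdc, password ticdc_secret, per the xtrace) until the body contains "etcd info", sleeping 3 seconds between attempts and giving up after 50. A condensed sketch of that loop as reconstructed from the trace:

get_info_fail_msg='failed to get info:'
etcd_info_msg='etcd info'
curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
for ((i = 0; i <= 50; i++)); do
    res=$($curl_status_cmd)
    # Keep retrying while the server refuses connections or reports a fetch failure.
    if echo "$res" | grep -q "$get_info_fail_msg"; then
        sleep 3
        continue
    fi
    # Once the capture has registered itself in etcd, the endpoint dumps "etcd info".
    if echo "$res" | grep -q "$etcd_info_msg"; then
        break
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time"
        exit 1
    fi
    sleep 3
done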
Logging trace to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:22 GMT < Content-Length: 859 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f90a5d38-03d8-4581-be5d-0d416c4b7ea8 {"id":"f90a5d38-03d8-4581-be5d-0d416c4b7ea8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832120} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f54ce949 f90a5d38-03d8-4581-be5d-0d416c4b7ea8 /tidb/cdc/default/default/upstream/7365147799832348465 {"id":7365147799832348465,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f90a5d38-03d8-4581-be5d-0d416c4b7ea8 {"id":"f90a5d38-03d8-4581-be5d-0d416c4b7ea8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832120} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f54ce949 f90a5d38-03d8-4581-be5d-0d416c4b7ea8 /tidb/cdc/default/default/upstream/7365147799832348465 
{"id":7365147799832348465,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f90a5d38-03d8-4581-be5d-0d416c4b7ea8 {"id":"f90a5d38-03d8-4581-be5d-0d416c4b7ea8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832120} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f54ce949 f90a5d38-03d8-4581-be5d-0d416c4b7ea8 /tidb/cdc/default/default/upstream/7365147799832348465 {"id":7365147799832348465,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.12887.out cli changefeed create --start-ts=449532950739943425 '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name [WARN] --tz is deprecated in changefeed settings. table mark.finish_mark_2 not exists for 5-th check, retry later Create changefeed successfully! ID: custom-changefeed-name Info: {"upstream_id":7365147799832348465,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:15:23.324739836+08:00","start_ts":449532950739943425,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":4495329
50739943425,"checkpoint_ts":449532950739943425,"checkpoint_time":"2024-05-04 22:15:17.996"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/05/04 22:15:23.010 +08:00] [INFO] [main.go:61] ["table mover started"] [2024/05/04 22:15:23.012 +08:00] [INFO] [main.go:166] ["new cluster initialized"] [2024/05/04 22:15:23.012 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=75d96dae-6a9f-4ac8-8d87-b7e27a06aa94] [2024/05/04 22:15:23.013 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300] [2024/05/04 22:15:23.013 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"] [2024/05/04 22:15:23.207 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=3740de0c-97f2-4404-b8ea-d225116b9af3] [processorDetail="{\"table_ids\":[]}"] [2024/05/04 22:15:23.406 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=75d96dae-6a9f-4ac8-8d87-b7e27a06aa94] [processorDetail="{\"table_ids\":[110]}"] table test.finish_mark not exists for 3-th check, retry later [2024/05/04 22:15:23.606 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=38b0fb8f-762d-49a4-8286-cba15ceb1451] [captureID=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36] [processorDetail="{\"table_ids\":[108]}"] [2024/05/04 22:15:23.606 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"3740de0c-97f2-4404-b8ea-d225116b9af3\":[],\"75d96dae-6a9f-4ac8-8d87-b7e27a06aa94\":[{\"ID\":110,\"Changefeed\":\"38b0fb8f-762d-49a4-8286-cba15ceb1451\"}],\"bcd0ce48-6b0f-4dc8-b942-b80a09b84c36\":[{\"ID\":108,\"Changefeed\":\"38b0fb8f-762d-49a4-8286-cba15ceb1451\"}]}"] [2024/05/04 22:15:23.606 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=38b0fb8f-762d-49a4-8286-cba15ceb1451&target-cp-id=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36&table-id=110"] [2024/05/04 22:15:23.656 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=110] [2024/05/04 22:15:23.656 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=75d96dae-6a9f-4ac8-8d87-b7e27a06aa94] [targetCapture=bcd0ce48-6b0f-4dc8-b942-b80a09b84c36] check diff successfully table ddl_attributes.finish_mark not exists for 5-th check, retry later table move_table.check2 not exists for 1-th check, retry later [Sat May 4 22:15:24 CST 2024] <<<<<< START cdc server in mq_sink_dispatcher case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.1387313875.out server --log-file /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc.log --log-level info --data-dir /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 22:15:24 CST 2024] <<<<<< START kafka consumer in cli_with_auth case >>>>>> table test.simple not exists for 1-th check, retry later table mark.finish_mark_3 not exists for 37-th check, retry later table mark.finish_mark_2 not exists for 6-th check, retry later table move_table.check2 exists table test.finish_mark not exists for 4-th check, retry later check diff successfully table ddl_attributes.finish_mark not exists for 6-th check, retry later wait process cdc.test exit for 1-th time... table mark.finish_mark_3 not exists for 38-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resourcecontrol/run.sh using Sink-Type: kafka... <<================= table mark.finish_mark_2 exists table mark.finish_mark_3 not exists for 1-th check, retry later The 1 times to try to start tidb cluster... wait process cdc.test exit for 2-th time... table test.simple not exists for 2-th check, retry later wait process cdc.test exit for 3-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:27 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bba1007a-7708-4f8b-85b6-4c99e272e55f {"id":"bba1007a-7708-4f8b-85b6-4c99e272e55f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832124} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f56c03cc bba1007a-7708-4f8b-85b6-4c99e272e55f /tidb/cdc/default/default/upstream/7365147836923933904 {"id":7365147836923933904,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bba1007a-7708-4f8b-85b6-4c99e272e55f {"id":"bba1007a-7708-4f8b-85b6-4c99e272e55f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832124} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f56c03cc bba1007a-7708-4f8b-85b6-4c99e272e55f /tidb/cdc/default/default/upstream/7365147836923933904 {"id":7365147836923933904,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' 
*** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bba1007a-7708-4f8b-85b6-4c99e272e55f {"id":"bba1007a-7708-4f8b-85b6-4c99e272e55f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832124} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f56c03cc bba1007a-7708-4f8b-85b6-4c99e272e55f /tidb/cdc/default/default/upstream/7365147836923933904 {"id":7365147836923933904,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table test.finish_mark not exists for 5-th check, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:15:28 CST 2024] <<<<<< run test case move_table success! >>>>>> table ddl_attributes.finish_mark not exists for 7-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13946.out cli tso query --pd=http://127.0.0.1:2379 table mark.finish_mark_3 not exists for 39-th check, retry later table mark.finish_mark_3 not exists for 2-th check, retry later table test.simple exists table test.`simple-dash` exists + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532951159373825, "checkpoint_time": "2024-05-04 22:15:19.596", "error": null }' + echo '{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532951159373825, "checkpoint_time": "2024-05-04 22:15:19.596", "error": null }' { "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532951159373825, "checkpoint_time": "2024-05-04 22:15:19.596", "error": null } ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532951159373825, '"checkpoint_time":' '"2024-05-04' '22:15:19.596",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532951159373825, '"checkpoint_time":' '"2024-05-04' '22:15:19.596",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379 changefeed count 1 check pass, pd_addr: http://127.0.0.1:2679 changefeed count 1 check pass, pd_addr: http://127.0.0.1:2779 table test.finish_mark not exists for 6-th check, retry later changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779 table ddl_attributes.finish_mark not exists for 8-th check, retry later + set +x + tso='449532953625624578 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532953625624578 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13983.out cli changefeed create --start-ts=449532953625624578 '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/changefeed.toml Error: [CDC:ErrChangefeedUpdateRefused]changefeed update error: can only update changefeed config when it is stopped or failed update changefeed config should fail when changefeed is running, got Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc00163c0b0} {Type:update Path:[Config SyncPointRetention] From: To:0xc00163c0b8} {Type:update Path:[Config Consistent] From: To:0xc0012d7340} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13251.out cli changefeed --changefeed-id custom-changefeed-name pause table mark.finish_mark_3 not exists for 3-th check, retry later Create changefeed successfully! 
ID: test Info: {"upstream_id":7365147836923933904,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T22:15:30.914244602+08:00","start_ts":449532953625624578,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["verify.t"],"partition":"index-value"},{"matcher":["dispatcher.index"],"partition":"index-value","index":"idx_a"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532953625624578,"checkpoint_ts":449532953625624578,"checkpoint_time":"2024-05-04 22:15:29.004"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... table mark.finish_mark_3 not exists for 40-th check, retry later PASS coverage: 1.9% of statements in github.com/pingcap/tiflow/... 
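The dispatchers block inside the Info JSON above mirrors the test's conf/changefeed.toml; written out as a config file it would look roughly like this (reconstructed from the JSON, so formatting is approximate). The second rule pins dispatcher.index to an index named idx_a, which is what later trips CDC:ErrDispatcherFailed in this log:

[sink]
dispatchers = [
    { matcher = ["verify.t"], partition = "index-value" },
    { matcher = ["dispatcher.index"], partition = "index-value", index = "idx_a" },
]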
table test.finish_mark not exists for 7-th check, retry later table ddl_attributes.finish_mark not exists for 9-th check, retry later + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null }' + echo '{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null }' { "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null } ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532953625624578, '"checkpoint_time":' '"2024-05-04' '22:15:29.004",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532953625624578, '"checkpoint_time":' '"2024-05-04' '22:15:29.004",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + set +x table mark.finish_mark_3 not exists for 4-th check, retry later table mark.finish_mark_3 not exists for 41-th check, retry later table test.finish_mark not exists for 8-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/resourcecontrol Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
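check_changefeed_state, whose xtrace appears above, queries the changefeed with cdc cli and asserts on the state and error message via jq. A condensed sketch of the helper as it can be reconstructed from the trace (argument handling in the real script may differ):

check_changefeed_state() {
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        return 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
        echo "error message $message does not match $error_msg"
        return 1
    fi
}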
table ddl_attributes.finish_mark exists check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null }' + echo '{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null }' { "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532953625624578, "checkpoint_time": "2024-05-04 22:15:29.004", "error": null } ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532953625624578, '"checkpoint_time":' '"2024-05-04' '22:15:29.004",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 1-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... table mark.finish_mark_3 not exists for 42-th check, retry later wait process cdc.test exit for 2-th time... table mark.finish_mark_3 not exists for 5-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:15:35 CST 2024] <<<<<< run test case ddl_attributes success! >>>>>> check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=stopped + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532954095386626, "checkpoint_time": "2024-05-04 22:15:30.796", "error": null }' + echo '{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532954095386626, "checkpoint_time": "2024-05-04 22:15:30.796", "error": null }' { "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449532954095386626, "checkpoint_time": "2024-05-04 22:15:30.796", "error": null } ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449532954095386626, '"checkpoint_time":' '"2024-05-04' '22:15:30.796",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! 
stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449532954095386626, '"checkpoint_time":' '"2024-05-04' '22:15:30.796",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13335.out cli changefeed update --pd=http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779 --config=/tmp/tidb_cdc_test/cli_with_auth/changefeed.toml --no-confirm --changefeed-id custom-changefeed-name table test.finish_mark not exists for 9-th check, retry later + info='{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null }' + echo '{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null }' { "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null } ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532954726629411, '"checkpoint_time":' '"2024-05-04' '22:15:33.204",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 2-th time, retry later Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc0019ad4a8} {Type:update Path:[Config SyncPointRetention] From: To:0xc0019ad4b8} {Type:update Path:[Config Consistent] From: To:0xc001420620} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} table mark.finish_mark_3 not exists for 43-th check, retry later Update changefeed config successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7365147799832348465,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:15:23.324739836+08:00","start_ts":449532950739943425,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":true,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":true,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":0,"checkpoint_ts":449532954095386626,"checkpoint_time":"2024-05-04 22:15:30.796"} PASS coverage: 2.8% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table mark.finish_mark_3 exists table mark.finish_mark_4 not exists for 1-th check, retry later table test.finish_mark not exists for 10-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13374.out cli changefeed --changefeed-id custom-changefeed-name resume table mark.finish_mark_3 not exists for 44-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) PASS table mark.finish_mark_4 not exists for 2-th check, retry later coverage: 2.1% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 11-th check, retry later check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": { "time": "2024-05-04T22:15:37.459100917+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' + echo '{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": { "time": "2024-05-04T22:15:37.459100917+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' { "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": { "time": "2024-05-04T22:15:37.459100917+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449532954726629411, '"checkpoint_time":' '"2024-05-04' '22:15:33.204",' '"error":' '{' '"time":' '"2024-05-04T22:15:37.459100917+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' + set +x + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449532954726629411, '"checkpoint_time":' '"2024-05-04' '22:15:33.204",' '"error":' '{' '"time":' '"2024-05-04T22:15:37.459100917+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' + message='[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a' + [[ ! 
[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14253.out cli changefeed update -c test '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/new_changefeed.toml --no-confirm table mark.finish_mark_4 not exists for 3-th check, retry later table mark.finish_mark_3 not exists for 45-th check, retry later Diff of changefeed config: {Type:update Path:[Config SyncPointInterval] From: To:0xc0039a0388} {Type:update Path:[Config SyncPointRetention] From: To:0xc0039a0398} {Type:update Path:[Config Sink DispatchRules 0 Matcher 0] From:verify.t To:dispatcher.index} {Type:delete Path:[Config Sink DispatchRules 1 Matcher 0] From:dispatcher.index To:} {Type:delete Path:[Config Sink DispatchRules 1 PartitionRule] From:index-value To:} {Type:delete Path:[Config Sink DispatchRules 1 IndexName] From:idx_a To:} {Type:update Path:[Config Consistent] From: To:0xc0013dc1c0} Update changefeed config successfully! ID: test Info: {"upstream_id":7365147836923933904,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T22:15:30.914244602+08:00","start_ts":449532953625624578,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","dispatchers":[{"matcher":["dispatcher.index"],"partition":"index-value"}],"encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"failed","error":{"addr":"127.0.0.1:8300","code":"CDC:ErrDispatcherFailed","message":"[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"},"creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":0,"checkpoint_ts":449532954726629411,"checkpoint_time":"2024-05-04 22:15:33.204"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
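The expanded trace above follows the suite's check pattern: query the changefeed with cdc cli changefeed query, pull .state and .error.message out of the JSON with jq, and compare them against the expected values, retrying on mismatch. A minimal sketch of that check, assuming cdc and jq are on PATH (the function name and argument handling are illustrative, not the exact helper from the repository):

  check_changefeed_state() {
      local endpoints=$1 changefeed_id=$2 expected_state=$3 expected_error=$4
      local info state message
      # query the changefeed and keep the raw JSON for both assertions
      info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
      state=$(echo "$info" | jq -r .state)
      if [[ ! "$state" == "$expected_state" ]]; then
          echo "changefeed state $state does not equal to $expected_state"
          return 1
      fi
      message=$(echo "$info" | jq -r .error.message)
      if [[ ! "$message" =~ $expected_error ]]; then
          echo "error message $message does not match $expected_error"
          return 1
      fi
      echo "run task successfully"
  }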
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 12-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14287.out cli changefeed resume -c test =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/autorandom/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table mark.finish_mark_4 not exists for 4-th check, retry later table mark.finish_mark_3 not exists for 46-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 13-th check, retry later + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532957240852483, "checkpoint_time": "2024-05-04 22:15:42.795", "error": null }' + echo '{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532957240852483, "checkpoint_time": "2024-05-04 22:15:42.795", "error": null }' { "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532957240852483, "checkpoint_time": "2024-05-04 22:15:42.795", "error": null } ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532957240852483, '"checkpoint_time":' '"2024-05-04' '22:15:42.795",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532957240852483, '"checkpoint_time":' '"2024-05-04' '22:15:42.795",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13465.out cli changefeed --changefeed-id custom-changefeed-name remove Changefeed remove successfully. ID: custom-changefeed-name CheckpointTs: 449532957502996483 SinkURI: kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
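For reference, the changefeed lifecycle exercised in this stretch of the log boils down to the following cli invocations (a sketch; the PD address, changefeed id, and config path are the ones shown above and differ per test case, and in the log the same subcommands are driven through cdc.test with -test.coverprofile to collect coverage):

  cdc cli changefeed update --pd=http://127.0.0.1:2379 --changefeed-id custom-changefeed-name \
      --config=/tmp/tidb_cdc_test/cli_with_auth/changefeed.toml --no-confirm
  cdc cli changefeed --changefeed-id custom-changefeed-name resume
  cdc cli changefeed --changefeed-id custom-changefeed-name remove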
+ set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null }' + echo '{ "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null }' { "upstream_id": 7365147836923933904, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449532954726629411, "checkpoint_time": "2024-05-04 22:15:33.204", "error": null } ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532954726629411, '"checkpoint_time":' '"2024-05-04' '22:15:33.204",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147836923933904, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449532954726629411, '"checkpoint_time":' '"2024-05-04' '22:15:33.204",' '"error":' null '}' ++ jq -r .error.message table mark.finish_mark_3 exists table mark.finish_mark not exists for 1-th check, retry later + message=null + [[ ! null =~ null ]] run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd7db200011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:11013, start at 2024-05-04 22:15:44.088999971 +0800 CST m=+5.226414049 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:44.095 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:44.072 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:44.072 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd7db200011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:11013, start at 2024-05-04 22:15:44.088999971 +0800 CST m=+5.226414049 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:44.095 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:44.072 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:44.072 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd7dbcc0008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-mpv2f-zft7l, pid:11099, start at 2024-05-04 22:15:44.123370632 +0800 CST m=+5.208475839 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:44.130 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:44.115 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:44.115 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
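The VARIABLE_NAME / VARIABLE_VALUE dump above is the bootstrap and GC metadata the startup scripts print while verifying that each TiDB instance is up. A sketch of how such a dump can be reproduced, assuming the mysql.tidb system table and the default upstream port (both are assumptions here, not shown in the log):

  # list the bootstrap/GC metadata rows, including tikv_gc_safe_point and tikv_gc_life_time
  mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb"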
Logging trace to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table mark.finish_mark_4 not exists for 5-th check, retry later table test.finish_mark not exists for 1-th check, retry later + set +x table test.finish_mark not exists for 14-th check, retry later table mark.finish_mark exists table mark.finish_mark_4 not exists for 6-th check, retry later check diff successfully [Sat May 4 22:15:41 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 1 [Sat May 4 22:15:41 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 2 [Sat May 4 22:15:41 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 3 table test.table1 not exists for 1-th check, retry later table test.table1 not exists for 2-th check, retry later table test.table1 exists table test.table2 exists table test.table3 exists check diff successfully table test.table10 not exists for 1-th check, retry later table test.finish_mark exists + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12495.out cli tso query --pd=http://127.0.0.1:2379 table test.table10 exists table test.table20 exists check diff successfully check diff successfully wait process cdc.test exit for 1-th time... table test.finish_mark not exists for 15-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/autorandom Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... wait process cdc.test exit for 2-th time... + set +x + tso='449532958480007169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' 
+ echo 449532958480007169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:15:49 CST 2024] <<<<<< START cdc server in resourcecontrol case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.1253712539.out server --log-file /tmp/tidb_cdc_test/resourcecontrol/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resourcecontrol/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:15:49 CST 2024] <<<<<< run test case mq_sink_dispatcher success! >>>>>> table mark.finish_mark_4 not exists for 7-th check, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:15:49 CST 2024] <<<<<< run test case default_value success! >>>>>> changefeed count 0 check pass, pd_addr: http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13539.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name [WARN] --tz is deprecated in changefeed settings. Create changefeed successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7365147799832348465,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-6082?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:15:49.638458889+08:00","start_ts":449532959010324481,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532959010324481,"checkpoint_ts":449532959010324481,"checkpoint_time":"2024-05-04 22:15:49.545"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 16-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } table mark.finish_mark_4 not exists for 8-th check, retry later [Pipeline] // container [Pipeline] } + set +x [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } table test.finish_mark exists check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:15:52 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ce703ed4-b9a5-480c-92c5-07a2aa9d08ea {"id":"ce703ed4-b9a5-480c-92c5-07a2aa9d08ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832149} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5cc92cf ce703ed4-b9a5-480c-92c5-07a2aa9d08ea /tidb/cdc/default/default/upstream/7365147939772605217 {"id":7365147939772605217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ce703ed4-b9a5-480c-92c5-07a2aa9d08ea {"id":"ce703ed4-b9a5-480c-92c5-07a2aa9d08ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832149} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5cc92cf ce703ed4-b9a5-480c-92c5-07a2aa9d08ea /tidb/cdc/default/default/upstream/7365147939772605217 {"id":7365147939772605217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ce703ed4-b9a5-480c-92c5-07a2aa9d08ea {"id":"ce703ed4-b9a5-480c-92c5-07a2aa9d08ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832149} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f5cc92cf ce703ed4-b9a5-480c-92c5-07a2aa9d08ea /tidb/cdc/default/default/upstream/7365147939772605217 {"id":7365147939772605217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12602.out cli changefeed create --start-ts=449532958480007169 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-5588?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: d0b73117-36af-4e57-910f-cbf147b85987 Info: {"upstream_id":7365147939772605217,"namespace":"default","id":"d0b73117-36af-4e57-910f-cbf147b85987","sink_uri":"kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-5588?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:15:52.570973675+08:00","start_ts":449532958480007169,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532958480007169,"checkpoint_ts":449532958480007169,"checkpoint_time":"2024-05-04 22:15:47.522"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
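The create flow traced above is: grab a start TSO from PD, then create a changefeed whose sink URI points at the Kafka broker with the protocol and size limits the consumer expects. A condensed sketch under those assumptions (topic name and URI options are copied from the log output above; only the first field of the tso query output is the timestamp, the rest is coverage noise):

  start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
  cdc cli changefeed create --start-ts="$start_ts" \
      --sink-uri="kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-5588?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"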
table mark.finish_mark_4 not exists for 9-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 22:15:54 CST 2024] <<<<<< START kafka consumer in resourcecontrol case >>>>>> + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532960124436484, "checkpoint_time": "2024-05-04 22:15:53.795", "error": null }' + echo '{ "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532960124436484, "checkpoint_time": "2024-05-04 22:15:53.795", "error": null }' { "upstream_id": 7365147799832348465, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449532960124436484, "checkpoint_time": "2024-05-04 22:15:53.795", "error": null } ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532960124436484, '"checkpoint_time":' '"2024-05-04' '22:15:53.795",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365147799832348465, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449532960124436484, '"checkpoint_time":' '"2024-05-04' '22:15:53.795",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13643.out cli changefeed create --start-ts=449532950739943425 '--sink-uri=kafka://127.0.0.1:9093/ticdc-cli-test-ssl-17133?protocol=open-protocol&ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem&cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem&key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true' --tz=Asia/Shanghai [WARN] --tz is deprecated in changefeed settings. table mark.finish_mark_4 exists table mark.finish_mark not exists for 1-th check, retry later Create changefeed successfully! 
ID: c70a8ef3-72e3-48d4-b6c6-8bf7a2da6935 Info: {"upstream_id":7365147799832348465,"namespace":"default","id":"c70a8ef3-72e3-48d4-b6c6-8bf7a2da6935","sink_uri":"kafka://127.0.0.1:9093/ticdc-cli-test-ssl-17133?protocol=open-protocol\u0026ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem\u0026cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem\u0026key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760\u0026insecure-skip-verify=true","create_time":"2024-05-04T22:15:55.569934201+08:00","start_ts":449532950739943425,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532950739943425,"checkpoint_ts":449532950739943425,"checkpoint_time":"2024-05-04 22:15:17.996"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table resourcecontrol.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 1-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13683.out cli unsafe delete-service-gc-safepoint table mark.finish_mark not exists for 2-th check, retry later Confirm that you know what this command will do and use it at your own risk [Y/N] CDC service GC safepoint truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 2-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. 
ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd8a7bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:30287, start at 2024-05-04 22:15:57.212526318 +0800 CST m=+5.335731625 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:57.221 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:57.217 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:57.217 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table resourcecontrol.finish_mark exists check diff successfully + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13718.out cli unsafe reset --no-confirm --pd=http://127.0.0.1:2379 reset and all metadata truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table mark.finish_mark not exists for 3-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 3-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:16:00 CST 2024] <<<<<< run test case resourcecontrol success! >>>>>> + set +x table mark.finish_mark not exists for 4-th check, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh: line 1: 14381 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/new_changefeed.toml" 2>&1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
[2024/05/04 22:15:53.199 +08:00] [WARN] [diff.go:551] ["checksum is not equal"] [table=`test`.`pkuk`] [where="((TRUE) AND TRUE)"] ["source checksum"=2228319994] ["target checksum"=1712949501] ["get source checksum cost"=2.154125ms] ["get target checksum cost"=1.367992ms] [2024/05/04 22:15:53.202 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ pk: 0, uk: 1, v: 2608, }"] [2024/05/04 22:15:53.223 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ pk: 4, uk: 8, v: 7908, }"] [2024/05/04 22:15:53.323 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ pk: 10, uk: 3, v: 5269, }"] [2024/05/04 22:15:53.423 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ pk: 12, uk: 11, v: 5372, }"] [2024/05/04 22:15:53.724 +08:00] [WARN] [diff.go:745] ["rows is not equal"] [table=`test`.`pkuk`] [where="((TRUE) AND TRUE)"] [cost=524.187622ms] [2024/05/04 22:15:53.727 +08:00] [WARN] [diff.go:384] ["check chunk data not equal"] [chunk="{\"id\":0,\"bounds\":[],\"where\":\"((TRUE) AND TRUE)\",\"args\":null,\"state\":\"failed\"}"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd8a7bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:30287, start at 2024-05-04 22:15:57.212526318 +0800 CST m=+5.335731625 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:57.221 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:15:57.217 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:57.217 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd8a9340014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:30375, start at 2024-05-04 22:15:57.285282161 +0800 CST m=+5.342183633 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:17:57.292 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-22:15:57.261 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:05:57.261 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/autorandom/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/autorandom/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark not exists for 4-th check, retry later table mark.finish_mark not exists for 5-th check, retry later [2024/05/04 22:16:03.245 +08:00] [INFO] [case.go:115] ["sync updatePKUK take: 16.840328096s"] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13801.out cli unsafe resolve-lock --region=3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/simple/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
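Several cases above start a cdc server and then poll its debug endpoint before proceeding: the loop keeps curling /debug/info until the response contains "etcd info", and gives up after 50 attempts. A sketch of that readiness probe, using the same address and basic-auth credentials shown in the trace (loop structure is illustrative):

  for i in $(seq 0 50); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret || true)
      if echo "$res" | grep -q 'failed to get info:'; then
          echo 'check health failed'
      elif echo "$res" | grep -q 'etcd info'; then
          break   # server is up and registered in etcd
      fi
      if [ "$i" -eq 50 ]; then
          echo 'cdc server did not become ready in time'
          exit 1
      fi
      sleep 3
  done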
[Sat May 4 22:16:03 CST 2024] <<<<<< START cdc server in autorandom case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.autorandom.3176531767.out server --log-file /tmp/tidb_cdc_test/autorandom/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/autorandom/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 5-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table mark.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13837.out cli unsafe resolve-lock --region=3 --ts=449532961959706632 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 6-th check, retry later wait process cdc.test exit for 3-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:16:07 CST 2024] <<<<<< run test case multi_source success! 
>>>>>> < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:07 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de27dfac-7708-4417-8bb7-16e9c23f9568 {"id":"de27dfac-7708-4417-8bb7-16e9c23f9568","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832164} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6033ed5 de27dfac-7708-4417-8bb7-16e9c23f9568 /tidb/cdc/default/default/upstream/7365147995591418104 {"id":7365147995591418104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de27dfac-7708-4417-8bb7-16e9c23f9568 {"id":"de27dfac-7708-4417-8bb7-16e9c23f9568","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832164} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6033ed5 de27dfac-7708-4417-8bb7-16e9c23f9568 /tidb/cdc/default/default/upstream/7365147995591418104 {"id":7365147995591418104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de27dfac-7708-4417-8bb7-16e9c23f9568 {"id":"de27dfac-7708-4417-8bb7-16e9c23f9568","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832164} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6033ed5 de27dfac-7708-4417-8bb7-16e9c23f9568 /tidb/cdc/default/default/upstream/7365147995591418104 {"id":7365147995591418104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Verifying downstream PD is started... + set +x % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 218 100 212 100 6 156k 4538 --:--:-- --:--:-- --:--:-- 207k { "error_msg": "[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }", "error_code": "CDC:ErrAPIInvalidParam" Create changefeed successfully! 
ID: d435e3e9-e02a-4121-8be4-2a5955295636 Info: {"upstream_id":7365147995591418104,"namespace":"default","id":"d435e3e9-e02a-4121-8be4-2a5955295636","sink_uri":"kafka://127.0.0.1:9092/ticdc-autorandom-test-1986?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:16:07.309112684+08:00","start_ts":449532963629826051,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532963629826051,"checkpoint_ts":449532963629826051,"checkpoint_time":"2024-05-04 22:16:07.167"} [Sat May 4 22:16:07 CST 2024] <<<<<< START kafka consumer in autorandom case >>>>>> Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table autorandom_test.table_a not exists for 1-th check, retry later table test.finish_mark not exists for 7-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table autorandom_test.table_a not exists for 2-th check, retry later table test.finish_mark not exists for 8-th check, retry later } % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 182 100 182 0 0 214k 0 --:--:-- --:--:-- --:--:-- 177k { "version": "v8.2.0-alpha-52-g6a342866d", "git_hash": "6a342866deda3271b067f649c64b771bbe3d2a00", "id": "4bbe4da4-649b-404e-89b7-9e1f87c7c8ee", "pid": 12831, "is_owner": true }wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:16:12 CST 2024] <<<<<< run test case cli_with_auth success! >>>>>> table autorandom_test.table_a exists check diff successfully wait process cdc.test exit for 1-th time... start tidb cluster in /tmp/tidb_cdc_test/simple Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } wait process cdc.test exit for 2-th time... [Pipeline] // timeout [Pipeline] } table test.finish_mark exists check diff successfully [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } wait process cdc.test exit for 1-th time... [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:16:13 CST 2024] <<<<<< run test case autorandom success! >>>>>> [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:16:14 CST 2024] <<<<<< run test case kafka_simple_basic_avro success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd9b8cc0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:15557, start at 2024-05-04 22:16:14.692631144 +0800 CST m=+5.299752914 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:14.700 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:14.693 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:14.693 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd9b8cc0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:15557, start at 2024-05-04 22:16:14.692631144 +0800 CST m=+5.299752914 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:14.700 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:14.693 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:14.693 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fd9b8c00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:15629, start at 2024-05-04 22:16:14.682717671 +0800 CST m=+5.239193225 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:14.690 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:14.689 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:14.689 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Logging trace to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Sat May 4 22:16:18 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 10 [Sat May 4 22:16:18 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 20 [Sat May 4 22:16:18 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: finish table test.finish not exists for 1-th check, retry later [Pipeline] // timeout [Sat May 4 22:16:19 CST 2024] <<<<<< START cdc server in kafka_column_selector case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.1705417056.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv table test.finish not exists for 2-th check, retry later [Pipeline] } [Pipeline] // stage [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish exists check diff successfully wait process cdc.test exit for 1-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd64120f-312e-4c9b-92d1-229ccd773809 {"id":"fd64120f-312e-4c9b-92d1-229ccd773809","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832180} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f645c1cc fd64120f-312e-4c9b-92d1-229ccd773809 /tidb/cdc/default/default/upstream/7365148071882785202 {"id":7365148071882785202,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd64120f-312e-4c9b-92d1-229ccd773809 {"id":"fd64120f-312e-4c9b-92d1-229ccd773809","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832180} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f645c1cc fd64120f-312e-4c9b-92d1-229ccd773809 /tidb/cdc/default/default/upstream/7365148071882785202 {"id":7365148071882785202,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd64120f-312e-4c9b-92d1-229ccd773809 {"id":"fd64120f-312e-4c9b-92d1-229ccd773809","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832180} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f43f645c1cc fd64120f-312e-4c9b-92d1-229ccd773809 /tidb/cdc/default/default/upstream/7365148071882785202 {"id":7365148071882785202,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.cli.17114.out cli changefeed create --start-ts=449532966939656193 '--sink-uri=kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/conf/changefeed.toml Create changefeed successfully! ID: test Info: {"upstream_id":7365148071882785202,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json\u0026partition-num=1\u0026enable-tidb-extension=true","create_time":"2024-05-04T22:16:23.375173124+08:00","start_ts":449532966939656193,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.t1"],"columns":["a","b"]},{"matcher":["test.*"],"columns":["*","!b"]},{"matcher":["test1.t1"],"columns":["column*","!column1"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532966939656193,"checkpoint_ts":449532966939656193,"checkpoint_time":"2024-05-04 22:16:19.793"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:16:23 CST 2024] <<<<<< run test case multi_topics_v2 success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. 
ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fda3e2c000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:21520, start at 2024-05-04 22:16:23.196918432 +0800 CST m=+5.481823935 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:23.204 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:23.179 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:23.179 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fda3e2c000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:21520, start at 2024-05-04 22:16:23.196918432 +0800 CST m=+5.481823935 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:23.204 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:23.179 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:23.179 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fda3f000015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:21594, start at 2024-05-04 22:16:23.28187089 +0800 CST m=+5.506680561 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:23.289 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:23.281 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:23.281 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/simple/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/simple/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/simple/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/simple/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/simple/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/generate_column/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.22907.out cli tso query --pd=http://127.0.0.1:2379 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/savepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/savepoint Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
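For reference, the column_selectors echoed back in the kafka_column_selector changefeed Info a few lines above map directly onto the --config file passed to `cli changefeed create`. The sketch below reconstructs that file: the matcher/columns values are copied from the Info JSON, while the TOML key spelling and file path are assumptions, not taken from this log.

# Sketch only: rebuild the changefeed.toml referenced above from the
# column_selectors in the Info JSON; key names are assumed.
cat > changefeed.toml <<'EOF'
[sink]
column-selectors = [
    { matcher = ["test.t1"],  columns = ["a", "b"] },             # keep only a and b
    { matcher = ["test.*"],   columns = ["*", "!b"] },            # all columns except b
    { matcher = ["test1.t1"], columns = ["column*", "!column1"] } # column* minus column1
]
EOF

# Same shape as the command traced above; start-ts is the TSO queried earlier.
cdc.test cli changefeed create \
    --start-ts=449532966939656193 \
    -c test \
    --config=changefeed.toml \
    '--sink-uri=kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true'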
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + set +x + tso='449532968836530178 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532968836530178 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x start tidb cluster in /tmp/tidb_cdc_test/generate_column Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Sat May 4 22:16:28 CST 2024] <<<<<< START cdc server in simple case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.2295822960.out server --log-file /tmp/tidb_cdc_test/simple/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/simple/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting build checksum checker... 
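The `curl ... /debug/info` retries traced above and below are the harness waiting for a freshly launched cdc server to come up. A minimal self-contained sketch of that wait loop, reusing the endpoint, credentials, retry budget, and markers that appear in the trace (the function name itself is illustrative):

# Poll the CDC debug endpoint until it serves owner/processor/etcd info,
# mirroring the traced loop: up to 50 attempts, 3s apart, basic auth ticdc/ticdc_secret.
wait_for_cdc_server() {
    local i res
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
                   --user ticdc:ticdc_secret 2>&1) || true
        if ! echo "$res" | grep -q 'failed to get info:' \
             && echo "$res" | grep -q 'etcd info'; then
            return 0        # server is up and serving etcd metadata
        fi
        sleep 3
    done
    echo "cdc server at 127.0.0.1:8300 did not become ready" >&2
    return 1
}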
go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading golang.org/x/time v0.5.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/philhofer/fwd v1.1.1 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading 
google.golang.org/protobuf v1.33.0 go: downloading cloud.google.com/go v0.112.2 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
go: downloading google.golang.org/api v0.170.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/go-playground/locales v0.14.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading golang.org/x/tools v0.20.0 go: downloading gopkg.in/yaml.v2 v2.4.0 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/17314fe8-ea66-4198-aa99-0af0a9177982 {"id":"17314fe8-ea66-4198-aa99-0af0a9177982","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f66236ee 17314fe8-ea66-4198-aa99-0af0a9177982 /tidb/cdc/default/default/upstream/7365148102600844728 {"id":7365148102600844728,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/17314fe8-ea66-4198-aa99-0af0a9177982 {"id":"17314fe8-ea66-4198-aa99-0af0a9177982","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f66236ee 17314fe8-ea66-4198-aa99-0af0a9177982 /tidb/cdc/default/default/upstream/7365148102600844728 {"id":7365148102600844728,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/17314fe8-ea66-4198-aa99-0af0a9177982 {"id":"17314fe8-ea66-4198-aa99-0af0a9177982","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832189} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f66236ee 17314fe8-ea66-4198-aa99-0af0a9177982 /tidb/cdc/default/default/upstream/7365148102600844728 {"id":7365148102600844728,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.23015.out cli changefeed create --start-ts=449532968836530178 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-simple-test-22255?protocol=open-protocol&partition-num=4&kafka-client-id=cdc_test_simple&kafka-version=2.4.1&max-message-bytes=10485760' Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
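The --start-ts=449532968836530178 passed to `cli changefeed create` above is the value extracted a few steps earlier by the `tso query` plus awk pipeline. A compact sketch of that extraction; the head/awk cleanup mirrors the `{print $1}` step in the trace, which strips the trailing PASS/coverage noise appended by the coverage-instrumented cdc.test binary (the sink URI here is shortened and illustrative):

# Query PD for a current TSO and keep only the first token of the first line;
# the rest of cdc.test's output is coverage bookkeeping, as seen in the trace.
pd_addr="http://127.0.0.1:2379"
tso=$(cdc.test cli tso query --pd="$pd_addr" | head -n1 | awk -F ' ' '{print $1}')

# Use it as the changefeed's starting point (the real run above targets a
# per-case Kafka topic with more URI parameters).
cdc.test cli changefeed create \
    --start-ts="$tso" \
    '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-test?protocol=open-protocol&kafka-version=2.4.1'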
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/kr/text v0.2.0 Create changefeed successfully! 
ID: 15d4f705-65a2-42c2-82a8-99ea809465b9 Info: {"upstream_id":7365148102600844728,"namespace":"default","id":"15d4f705-65a2-42c2-82a8-99ea809465b9","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-simple-test-22255?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=cdc_test_simple\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:16:32.301367716+08:00","start_ts":449532968836530178,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532968836530178,"checkpoint_ts":449532968836530178,"checkpoint_time":"2024-05-04 22:16:27.029"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + set +x [Sat May 4 22:16:33 CST 2024] <<<<<< START kafka consumer in simple case >>>>>> succeed to verify meta placement rules ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist check data failed 1-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 22:16:30.274 +08:00] [WARN] [diff.go:182] ["table struct is not equal"] [reason="column num not equal, one is 5 another is 4"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist check data failed 2-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/storage_cleanup + CDC_BINARY=cdc.test + SINK_TYPE=kafka + EXIST_FILES=() + CLEANED_FILES=() + trap stop_tidb_cluster EXIT + run kafka + '[' kafka '!=' storage ']' + return + check_logs /tmp/tidb_cdc_test/storage_cleanup ++ date + echo '[Sat May 4 22:16:35 CST 2024] <<<<<< run test case storage_cleanup success! >>>>>>' [Sat May 4 22:16:35 CST 2024] <<<<<< run test case storage_cleanup success! >>>>>> + stop_tidb_cluster ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb14640012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:14985, start at 2024-05-04 22:16:36.914814294 +0800 CST m=+5.217440850 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:36.921 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:36.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:36.889 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1' ____________________________________ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ check data failed 3-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_basic/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:16:38 CST 2024] <<<<<< run test case csv_storage_basic success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb14640012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:14985, start at 2024-05-04 22:16:36.914814294 +0800 CST m=+5.217440850 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:36.921 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:36.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:36.889 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb15280015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:15066, start at 2024-05-04 22:16:36.973887285 +0800 CST m=+5.220347596 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:36.980 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:36.938 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:36.938 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
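The storage_cleanup and csv_storage_* cases above finish almost immediately because their run scripts guard on the sink type, as the `'[' kafka '!=' storage ']' + return` trace shows. A self-contained sketch of that guard; the variable name and success message follow the trace, while the surrounding structure is assumed:

#!/usr/bin/env bash
# Sketch of the sink-type guard: when the suite is driven with SINK_TYPE=kafka,
# storage-only cases return from run() immediately and still report success.
set -eu

SINK_TYPE=${1:-kafka}

run() {
    if [ "$SINK_TYPE" != "storage" ]; then
        return      # nothing to do for non-storage sinks, as in the trace
    fi
    echo "storage-specific test body would execute here"
}

run
echo "[$(date)] <<<<<< run test case storage_cleanup success! >>>>>>"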
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/savepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/savepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check data successfully wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... [2024/05/04 22:16:40.973 +08:00] [WARN] [diff.go:551] ["checksum is not equal"] [table=`test`.`ntest`] [where="((TRUE) AND TRUE)"] ["source checksum"=0] ["target checksum"=1242262498] ["get source checksum cost"=2.133249ms] ["get target checksum cost"=3.480554ms] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:16:41 CST 2024] <<<<<< run test case csv_storage_multi_tables_ddl success! >>>>>> + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16411.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. 
Do not delete. tikv_gc_leader_uuid 63d0fdb38d40003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:11646, start at 2024-05-04 22:16:39.224571059 +0800 CST m=+5.251691782 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:39.231 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:39.221 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:39.221 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb38d40003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:11646, start at 2024-05-04 22:16:39.224571059 +0800 CST m=+5.251691782 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:39.231 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:39.221 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:39.221 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb3c180002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:11730, start at 2024-05-04 22:16:39.431525275 +0800 CST m=+5.402228234 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:39.439 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:39.430 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:39.430 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:16:42 CST 2024] <<<<<< run test case simple success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb45f0001c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:33067, start at 2024-05-04 22:16:40.109565003 +0800 CST m=+5.253872675 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:40.118 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
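The ArgMatches dump above is the TiFlash proxy echoing its parsed command line; the indices (2, 4, 6, ..., 20) recover the original flag order. Reconstructed here for readability only, as a shell array, since the launcher command itself does not appear in the log:

    proxy_args=(
      --engine-addr     127.0.0.1:9500
      --advertise-addr  127.0.0.1:9000
      --data-dir        /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/db/proxy
      --config          /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash-proxy.toml
      --engine-git-hash 8e170090fad91c94bef8d908e21c195c1d145b02
      --engine-version  v8.2.0-alpha-16-g8e170090f
      --engine-label    tiflash
      --pd-endpoints    127.0.0.1:2379
      --log-file        /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/proxy.log
      --addr            127.0.0.1:9000
    )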
tikv_gc_last_run_time 20240504-22:16:40.110 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:40.110 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb45f0001c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:33067, start at 2024-05-04 22:16:40.109565003 +0800 CST m=+5.253872675 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:40.118 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:40.110 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:40.110 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdb471c0017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-3jhbg-sn2sk, pid:33142, start at 2024-05-04 22:16:40.183397814 +0800 CST m=+5.274458381 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:18:40.189 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:16:40.185 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:06:40.185 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
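A quick sanity check on the GC variables dumped above: tikv_gc_safe_point is simply tikv_gc_last_run_time minus tikv_gc_life_time (20240504-22:16:40.110 − 10m0s = 20240504-22:06:40.110), so any snapshot taken within the last ten minutes remains readable, and the freshly created changefeeds (which have check_gc_safe_point enabled) can start without hitting the GC safe point.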
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/generate_column/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/generate_column/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449532972876169217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532972876169217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:16:43 CST 2024] <<<<<< START cdc server in savepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.1644716449.out server --log-file /tmp/tidb_cdc_test/savepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/savepoint/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
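The `+ tso=...` trace above shows how the changefeed start-ts is obtained: the CLI prints the TSO followed by coverage noise from the instrumented test binary, and only the first field is kept. A condensed sketch, assuming a plain cdc binary on PATH instead of the coverage-instrumented cdc.test:

    tso=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk -F ' ' '{print $1}')
    # e.g. 449532972876169217, later passed as --start-ts to `changefeed create`
    echo "$tso"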
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 22:16:44 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.1304813050.out server --log-file /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.34486.out cli tso query --pd=http://127.0.0.1:2379 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 22:16:45 CST 2024] <<<<<< run test case csv_storage_partition_table success! >>>>>> + set +x + tso='449532973681475586 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532973681475586 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 22:16:47 CST 2024] <<<<<< START cdc server in generate_column case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.3452434526.out server --log-file /tmp/tidb_cdc_test/generate_column/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/generate_column/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
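The traces above (for the savepoint, kafka_simple_handle_key_only and generate_column cases alike) all follow the same pattern: launch the server in the background, then poll its /debug/info endpoint until the body contains "etcd info", giving up after 50 attempts. A minimal sketch of that loop, assuming a plain cdc binary and a WORK_DIR variable pointing at the per-case directory:

    cdc server --log-file "$WORK_DIR/cdc.log" --log-level debug \
      --data-dir "$WORK_DIR/cdc_data" --cluster-id default &

    for ((i = 0; i <= 50; i++)); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1) || true
      # "failed to get info:" in the body means the server answered but is unhealthy;
      # "etcd info" means the capture has registered and the server is ready.
      echo "$res" | grep -q 'etcd info' && break
      if [ "$i" -eq 50 ]; then
        echo "cdc server failed to start" >&2
        exit 1
      fi
      sleep 3
    done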
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49adafcb-5d7f-46d3-8d73-daae93a957a0 {"id":"49adafcb-5d7f-46d3-8d73-daae93a957a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a2b4d3 49adafcb-5d7f-46d3-8d73-daae93a957a0 /tidb/cdc/default/default/upstream/7365148166399341453 {"id":7365148166399341453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49adafcb-5d7f-46d3-8d73-daae93a957a0 {"id":"49adafcb-5d7f-46d3-8d73-daae93a957a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a2b4d3 49adafcb-5d7f-46d3-8d73-daae93a957a0 /tidb/cdc/default/default/upstream/7365148166399341453 {"id":7365148166399341453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49adafcb-5d7f-46d3-8d73-daae93a957a0 {"id":"49adafcb-5d7f-46d3-8d73-daae93a957a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a2b4d3 49adafcb-5d7f-46d3-8d73-daae93a957a0 /tidb/cdc/default/default/upstream/7365148166399341453 {"id":7365148166399341453,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16501.out cli changefeed create --start-ts=449532972876169217 '--sink-uri=kafka://127.0.0.1:9092/ticdc-savepoint-test-10136?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
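Once the poll above breaks on "etcd info", the savepoint case creates its changefeed. Stripped of the coverage wrapper, the command traced above is simply:

    cdc cli changefeed create --start-ts=449532972876169217 \
      --sink-uri="kafka://127.0.0.1:9092/ticdc-savepoint-test-10136?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
    # The sink URI carries the topic name plus protocol, partition count, broker version
    # and maximum message size as query parameters.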
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > Create changefeed successfully! ID: 0e50229b-7b3a-4a0a-9d48-0f8b66b61ce7 Info: {"upstream_id":7365148166399341453,"namespace":"default","id":"0e50229b-7b3a-4a0a-9d48-0f8b66b61ce7","sink_uri":"kafka://127.0.0.1:9092/ticdc-savepoint-test-10136?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:16:47.53106721+08:00","start_ts":449532972876169217,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532972876169217,"checkpoint_ts":449532972876169217,"checkpoint_time":"2024-05-04 22:16:42.439"} PASS < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80bd364b-bc2f-4e88-b907-71fa43e595e1 {"id":"80bd364b-bc2f-4e88-b907-71fa43e595e1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a7b5ce 80bd364b-bc2f-4e88-b907-71fa43e595e1 /tidb/cdc/default/default/upstream/7365148178109874104 {"id":7365148178109874104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80bd364b-bc2f-4e88-b907-71fa43e595e1 
{"id":"80bd364b-bc2f-4e88-b907-71fa43e595e1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a7b5ce 80bd364b-bc2f-4e88-b907-71fa43e595e1 /tidb/cdc/default/default/upstream/7365148178109874104 {"id":7365148178109874104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80bd364b-bc2f-4e88-b907-71fa43e595e1 {"id":"80bd364b-bc2f-4e88-b907-71fa43e595e1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832204} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a7b5ce 80bd364b-bc2f-4e88-b907-71fa43e595e1 /tidb/cdc/default/default/upstream/7365148178109874104 {"id":7365148178109874104,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13112.out cli tso query --pd=http://127.0.0.1:2379 coverage: 2.4% of statements in github.com/pingcap/tiflow/... \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } + set +x [Sat May 4 22:16:49 CST 2024] <<<<<< START kafka consumer in savepoint case >>>>>> [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } table savepoint.finish_mark not exists for 1-th check, retry later [Pipeline] // podTemplate [Pipeline] } + set +x + tso='449532974260813828 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532974260813828 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13155.out cli changefeed create --start-ts=449532974260813828 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple' -c simple-handle-key-only --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Create changefeed successfully! 
ID: simple-handle-key-only Info: {"upstream_id":7365148178109874104,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple","create_time":"2024-05-04T22:16:49.65281345+08:00","start_ts":449532974260813828,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532974260813828,"checkpoint_ts":449532974260813828,"checkpoint_time":"2024-05-04 22:16:47.721"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
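The changefeed JSON above shows kafka_config.large_message_handle set to handle-key-only with lz4 compression, which comes from the conf/changefeed.toml passed on the command line. A plausible shape for that file, with TOML key names inferred from the JSON fields rather than copied from the repository, so verify against the actual test config before reuse:

    cat > conf/changefeed.toml <<'EOF'
    [sink.kafka-config.large-message-handle]
    large-message-handle-option = "handle-key-only"
    large-message-handle-compression = "lz4"
    claim-check-storage-uri = ""
    EOF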
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:16:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fe49a43-f2ad-465c-8787-a14754fd0eb6 {"id":"2fe49a43-f2ad-465c-8787-a14754fd0eb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832207} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a3e4d3 2fe49a43-f2ad-465c-8787-a14754fd0eb6 /tidb/cdc/default/default/upstream/7365148178065270966 {"id":7365148178065270966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fe49a43-f2ad-465c-8787-a14754fd0eb6 {"id":"2fe49a43-f2ad-465c-8787-a14754fd0eb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832207} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a3e4d3 2fe49a43-f2ad-465c-8787-a14754fd0eb6 /tidb/cdc/default/default/upstream/7365148178065270966 {"id":7365148178065270966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fe49a43-f2ad-465c-8787-a14754fd0eb6 {"id":"2fe49a43-f2ad-465c-8787-a14754fd0eb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832207} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f6a3e4d3 2fe49a43-f2ad-465c-8787-a14754fd0eb6 /tidb/cdc/default/default/upstream/7365148178065270966 {"id":7365148178065270966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.34574.out cli changefeed create --start-ts=449532973681475586 '--sink-uri=kafka://127.0.0.1:9092/ticdc-generate-column-test-7328?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 30328133-2bc2-4be5-bf7a-e8b58217ee7c Info: {"upstream_id":7365148178065270966,"namespace":"default","id":"30328133-2bc2-4be5-bf7a-e8b58217ee7c","sink_uri":"kafka://127.0.0.1:9092/ticdc-generate-column-test-7328?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:16:50.609758605+08:00","start_ts":449532973681475586,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532973681475586,"checkpoint_ts":449532973681475586,"checkpoint_time":"2024-05-04 22:16:45.511"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x table savepoint.finish_mark not exists for 2-th check, retry later + set +x [Sat May 4 22:16:52 CST 2024] <<<<<< START kafka consumer in generate_column case >>>>>> table savepoint.finish_mark exists check diff successfully table generate_column.t not exists for 1-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:16:55 CST 2024] <<<<<< run test case savepoint success! 
>>>>>> table generate_column.t exists table generate_column.t1 not exists for 1-th check, retry later table generate_column.t1 exists ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 2730 0 --:--:-- --:--:-- --:--:-- 2762 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-04 22:16:44.218","puller_resolved_ts":"2024-05-04 22:16:38.168","last_synced_ts":"2024-05-04 22:14:28.668","now_ts":"2024-05-04 22:16:45.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:16:44.218","puller_resolved_ts":"2024-05-04' '22:16:38.168","last_synced_ts":"2024-05-04' '22:14:28.668","now_ts":"2024-05-04' '22:16:45.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true + '[' true '!=' true ']' + kill_pd ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo + info='jenkins 9822 8.2 0.0 13326356 144944 ? Sl 22:14 0:13 pd-server --advertise-client-urls http://127.0.0.1:2379 --client-urls http://0.0.0.0:2379 --advertise-peer-urls http://127.0.0.1:2380 --peer-urls http://0.0.0.0:2380 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/pd1.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/pd1 --name=pd1 --initial-cluster=pd1=http://127.0.0.1:2380 jenkins 9883 5.6 0.0 13594072 138924 ? Sl 22:14 0:08 pd-server --advertise-client-urls http://127.0.0.1:2479 --client-urls http://0.0.0.0:2479 --advertise-peer-urls http://127.0.0.1:2480 --peer-urls http://0.0.0.0:2480 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/down_pd.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/down_pd' ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo ++ awk '{print $2}' ++ xargs kill -9 + sleep 20 {"level":"warn","ts":1714832211.7611966,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc003a161c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":1714832211.7612517,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":1714832211.761409,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc003a91180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714832211.761453,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":1714832212.2114058,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002093880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection 
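The synced_status_with_redo trace above checks the v2 synced API and then deliberately takes PD down to observe how the status reacts. Condensed, that step is:

    synced=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
    status=$(echo "$synced" | jq -r .synced)
    if [ "$status" != "true" ]; then
      echo "expected synced=true, got: $synced" >&2
      exit 1
    fi
    # Kill only the pd-server processes belonging to this case's working directory,
    # then wait before re-querying the synced status.
    ps aux | grep pd-server | grep /tmp/tidb_cdc_test/synced_status_with_redo \
      | awk '{print $2}' | xargs kill -9
    sleep 20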
refused\""} {"level":"info","ts":1714832212.2114568,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-05-04T22:16:56.07982+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:16:56.088543+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:16:56.148656+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} check diff failed 1-th time, retry later + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13208.out cli changefeed pause -c simple-handle-key-only PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13250.out cli changefeed update -c simple-handle-key-only '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple&max-message-bytes=700' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml --no-confirm Diff of changefeed config: {Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple To:kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple&max-message-bytes=700} {Type:update Path:[Config SyncPointInterval] From: To:0xc000a69b58} {Type:update Path:[Config SyncPointRetention] From: To:0xc000a69b68} {Type:update Path:[Config Consistent] From: To:0xc000a16150} Update changefeed config successfully! 
ID: simple-handle-key-only Info: {"upstream_id":7365148178109874104,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple\u0026max-message-bytes=700","create_time":"2024-05-04T22:16:49.65281345+08:00","start_ts":449532974260813828,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":0,"checkpoint_ts":449532976698753028,"checkpoint_time":"2024-05-04 22:16:57.021"} PASS =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc_server_tips/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff successfully wait process cdc.test exit for 1-th time... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13284.out cli changefeed resume -c simple-handle-key-only start tidb cluster in /tmp/tidb_cdc_test/cdc_server_tips Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:17:02 CST 2024] <<<<<< run test case generate_column success! 
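The kafka_simple_handle_key_only case then shrinks the allowed Kafka message size to force the handle-key-only path. As traced above, the changefeed must be paused before its sink URI can be updated, and --no-confirm suppresses the interactive diff prompt ($CUR here stands for the case's test directory):

    cdc cli changefeed pause  -c simple-handle-key-only
    cdc cli changefeed update -c simple-handle-key-only \
      --sink-uri="kafka://127.0.0.1:9092/simple-handle-key-only-4143?protocol=simple&max-message-bytes=700" \
      --config="$CUR/conf/changefeed.toml" --no-confirm
    cdc cli changefeed resume -c simple-handle-key-only
    # With max-message-bytes=700, rows that encode larger than that can presumably only be
    # delivered as handle-key-only messages, which is what the remainder of the case checks.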
>>>>>> {"level":"warn","ts":"2024-05-04T22:17:02.080741+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:02.09031+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:02.148831+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} + set +x table test.finish_mark not exists for 1-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed table test.finish_mark not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed.toml + rm -rf /tmp/tidb_cdc_test/synced_status + mkdir -p /tmp/tidb_cdc_test/synced_status + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
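The synced_status trace above shows the common per-case boilerplate before any CDC work starts; condensed into a plain script, it amounts to:

    CUR=$(cd "$(dirname "$0")" && pwd)
    source "$CUR/../_utils/test_prepare"   # exports UP_TIDB_PORT, DOWN_TIDB_PORT, PD/TiKV ports, KAFKA_VERSION, ...
    WORK_DIR=/tmp/tidb_cdc_test/synced_status
    CDC_BINARY=cdc.test
    SINK_TYPE=kafka
    trap stop_tidb_cluster EXIT            # harness helper: tear the cluster down even on failure
    rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
    start_tidb_cluster --workdir "$WORK_DIR"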
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:01 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:02 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:08.082092+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:08.092333+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:08.149464+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table test.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:17:09 CST 2024] <<<<<< run test case kafka_simple_handle_key_only success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/synced_status Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) 0 0 0 0 0 0 0 0 --:--:-- 0:00:03 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:04 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:05 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:06 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:07 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:08 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:14.083357+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:14.093576+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:14.150849+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdd3f240014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:24342, start at 2024-05-04 22:17:12.438970118 +0800 CST m=+5.348358099 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:12.447 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:12.443 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:12.443 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdd3f240014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:24342, start at 2024-05-04 22:17:12.438970118 +0800 CST m=+5.348358099 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:12.447 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:12.443 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:12.443 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fdd40c0000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:24414, start at 2024-05-04 22:17:12.514129264 +0800 CST m=+5.369382404 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:12.523 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:12.496 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:12.496 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } 0 0 0 0 0 0 0 0 --:--:-- 0:00:09 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:10 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:16.07402+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":"2024-05-04T22:17:16.074076+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-05-04T22:17:16.079462+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":"2024-05-04T22:17:16.079509+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} 
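The repeated "ERROR 2003 (HY000)" lines and the etcd-client "connection refused" warnings here are readiness polling against components that are still starting, not test failures. A minimal sketch of that wait pattern, assuming the mysql client shown in the errors and the UP_TIDB_HOST/UP_TIDB_PORT variables used elsewhere in these scripts (wait_for_tidb is a hypothetical name, not the actual helper in the tiflow test suite):

wait_for_tidb() {
    # poll until TiDB accepts MySQL connections, for at most ~60s
    local i
    for i in $(seq 1 60); do
        if mysql -h "${UP_TIDB_HOST}" -P "${UP_TIDB_PORT}" -u root -e 'SELECT 1' >/dev/null 2>&1; then
            return 0
        fi
        sleep 1
    done
    echo "TiDB did not become ready on ${UP_TIDB_HOST}:${UP_TIDB_PORT}" >&2
    return 1
}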
{"level":"warn","ts":"2024-05-04T22:17:16.138269+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":"2024-05-04T22:17:16.13832+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.cli.25736.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449532982130900994 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532982130900994 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x try a VALID cdc server command [Sat May 4 22:17:19 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2578125783.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 0 0 0 0 0 0 0 0 --:--:-- 0:00:11 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:12 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:13 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:14 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:20.08398+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:20.094645+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:20.152131+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1853/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fddb4040015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:17632, start at 2024-05-04 22:17:19.922254622 +0800 CST m=+5.351680299 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:19.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:19.923 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:19.923 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh: line 1: 13319 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fddb4040015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:17632, start at 2024-05-04 22:17:19.922254622 +0800 CST m=+5.351680299 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:19.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:19.923 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:19.923 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fddb7040014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-1nlmb-vbbrw, pid:17715, start at 2024-05-04 22:17:20.10120274 +0800 CST m=+5.473286690 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:20.107 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:20.065 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:20.065 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:17:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2f76af9-ba54-425d-9311-f6f0fb48373a {"id":"a2f76af9-ba54-425d-9311-f6f0fb48373a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832240} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f72723e3 a2f76af9-ba54-425d-9311-f6f0fb48373a /tidb/cdc/default/default/upstream/7365148316092919800 {"id":7365148316092919800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2f76af9-ba54-425d-9311-f6f0fb48373a {"id":"a2f76af9-ba54-425d-9311-f6f0fb48373a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832240} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f72723e3 a2f76af9-ba54-425d-9311-f6f0fb48373a /tidb/cdc/default/default/upstream/7365148316092919800 {"id":7365148316092919800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2f76af9-ba54-425d-9311-f6f0fb48373a {"id":"a2f76af9-ba54-425d-9311-f6f0fb48373a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832240} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f72723e3 a2f76af9-ba54-425d-9311-f6f0fb48373a /tidb/cdc/default/default/upstream/7365148316092919800 {"id":7365148316092919800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
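The cdc server startup check traced above (and repeated for every test case) boils down to polling the /debug/info endpoint until the etcd metadata dump appears. Condensed from the set -x traces, keeping the same endpoint, credentials, and loop bounds:

i=0
while [ "$i" -le 50 ]; do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    # the trace also greps the response for 'failed to get info:' to catch a server that came up broken
    if echo "$res" | grep -q 'etcd info'; then
        break                                   # owner/processor/etcd info is being served
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not come up on 127.0.0.1:8300" >&2
        exit 1
    fi
    sleep 3
    i=$((i + 1))
done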
+ cd /tmp/tidb_cdc_test/synced_status ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19180.out cli tso query --pd=http://127.0.0.1:2379 0 0 0 0 0 0 0 0 --:--:-- 0:00:15 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:16 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:17 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:18 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:19 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:20 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:26.085757+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:26.096505+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:26.152564+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"warn","ts":1714832246.7620943,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc003a91180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":1714832246.7620878,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc003a161c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":1714832246.7621388,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"info","ts":1714832246.7621522,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} + set +x + tso='449532984091738113 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532984091738113 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x + start_ts=449532984091738113 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status --binary cdc.test [Sat May 4 22:17:26 CST 2024] <<<<<< START cdc server in synced_status case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.1921819220.out server --log-file /tmp/tidb_cdc_test/synced_status/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 0 0 0 0 0 0 0 0 --:--:-- 0:00:21 --:--:-- 0{"level":"warn","ts":1714832247.2126462,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002093880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714832247.2127156,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:17:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/60a90557-50cc-47f8-8038-04d9d06be62e {"id":"60a90557-50cc-47f8-8038-04d9d06be62e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832247} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f74844d6 60a90557-50cc-47f8-8038-04d9d06be62e /tidb/cdc/default/default/upstream/7365148351308939581 {"id":7365148351308939581,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/60a90557-50cc-47f8-8038-04d9d06be62e {"id":"60a90557-50cc-47f8-8038-04d9d06be62e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832247} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f74844d6 60a90557-50cc-47f8-8038-04d9d06be62e /tidb/cdc/default/default/upstream/7365148351308939581 {"id":7365148351308939581,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/60a90557-50cc-47f8-8038-04d9d06be62e {"id":"60a90557-50cc-47f8-8038-04d9d06be62e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832247} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f74844d6 60a90557-50cc-47f8-8038-04d9d06be62e /tidb/cdc/default/default/upstream/7365148351308939581 {"id":7365148351308939581,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449532984091738113 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19271.out cli changefeed create --start-ts=449532984091738113 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml Create changefeed successfully! ID: test-1 Info: {"upstream_id":7365148351308939581,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-04T22:17:30.21839309+08:00","start_ts":449532984091738113,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532984091738113,"checkpoint_ts":449532984091738113,"checkpoint_time":"2024-05-04 22:17:25.223"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
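Right after creating the test-1 changefeed, the synced_status case polls the v2 synced API and inspects the JSON with jq, then compares the sink checkpoint against wall-clock time with a 300-second bound. A condensed sketch of the check performed by the traces that follow:

synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced_status" | jq .synced)                            # expected: true when idle
sink_checkpoint_ts=$(echo "$synced_status" | jq -r .sink_checkpoint_ts)
last_synced_ts=$(echo "$synced_status" | jq -r .last_synced_ts)
checkpoint_timestamp=$(date -d "$sink_checkpoint_ts" +%s)
current_timestamp=$(date +%s)
# the sink checkpoint must not lag wall-clock time by more than 300 seconds
if [ $((current_timestamp - checkpoint_timestamp)) -gt 300 ]; then
    echo "sink checkpoint is lagging too far behind" >&2
    exit 1
fi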
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 1128 0 --:--:-- --:--:-- --:--:-- 1133 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-04 22:17:31.623","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-04 22:17:32.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:17:31.623","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:17:32.000","info":"Data' syncing is 'finished"}' ++ jq .synced 0 0 0 0 0 0 0 0 --:--:-- 0:00:22 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:23 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:24 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:25 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:26 --:--:-- 0{"level":"warn","ts":"2024-05-04T22:17:32.086878+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000dc3180/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:32.097925+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001287880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T22:17:32.152837+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000e81180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} + status=true ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:17:31.623","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:17:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .sink_checkpoint_ts + sink_checkpoint_ts='2024-05-04 22:17:31.623' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:17:31.623","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:17:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .puller_resolved_ts + puller_resolved_ts='1970-01-01 08:00:00.000' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '22:17:31.623","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '22:17:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .last_synced_ts + last_synced_ts='1970-01-01 08:00:00.000' + '[' true '!=' true ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' ++ date '+%Y-%m-%d %H:%M:%S' 
+ current='2024-05-04 22:17:32' + echo 'sink_checkpoint_ts is 2024-05-04' 22:17:31.623 sink_checkpoint_ts is 2024-05-04 22:17:31.623 ++ date -d '2024-05-04 22:17:31.623' +%s + checkpoint_timestamp=1714832251 ++ date -d '2024-05-04 22:17:32' +%s + current_timestamp=1714832252 + '[' 1 -gt 300 ']' + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t1 exists + sleep 5 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) 0 0 0 0 0 0 0 0 --:--:-- 0:00:27 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:28 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:29 --:--:-- 0 100 135 100 135 0 0 4 0 0:00:33 0:00:30 0:00:03 27 100 135 100 135 0 0 4 0 0:00:33 0:00:30 0:00:03 33 + synced_status='{ "error_msg": "[CDC:ErrPDEtcdAPIError]etcd api call error: context deadline exceeded", "error_code": "CDC:ErrPDEtcdAPIError" }' ++ echo '{' '"error_msg":' '"[CDC:ErrPDEtcdAPIError]etcd' api call error: context deadline 'exceeded",' '"error_code":' '"CDC:ErrPDEtcdAPIError"' '}' ++ jq -r .error_code + error_code=CDC:ErrPDEtcdAPIError + cleanup_process cdc.test wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fde934c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:14535, start at 2024-05-04 22:17:34.191544644 +0800 CST m=+5.332980457 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:34.198 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:34.163 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:34.163 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fde934c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:14535, start at 2024-05-04 22:17:34.191544644 +0800 CST m=+5.332980457 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:34.198 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:34.163 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:34.163 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fde94c40013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-lr873-b8lc2, pid:14618, start at 2024-05-04 22:17:34.2816537 +0800 CST m=+5.369852769 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:19:34.288 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:17:34.257 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:07:34.257 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit + stop_tidb_cluster [Sat May 4 22:17:39 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only_avro case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.1594015942.out server --log-file /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 2875 0 --:--:-- --:--:-- --:--:-- 2858 100 243 100 243 0 0 2872 0 --:--:-- --:--:-- --:--:-- 2858 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-04 22:17:38.623","puller_resolved_ts":"2024-05-04 22:17:32.323","last_synced_ts":"2024-05-04 22:17:32.423","now_ts":"2024-05-04 22:17:39.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:17:38.623","puller_resolved_ts":"2024-05-04' '22:17:32.323","last_synced_ts":"2024-05-04' '22:17:32.423","now_ts":"2024-05-04' '22:17:39.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:17:38.623","puller_resolved_ts":"2024-05-04' '22:17:32.323","last_synced_ts":"2024-05-04' '22:17:32.423","now_ts":"2024-05-04' '22:17:39.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 table test1.finishmark exists [2024/05/04 22:17:32.149 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=3030946575] [2024/05/04 22:17:32.151 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t2] [checkSum=718014124] [2024/05/04 22:17:32.153 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t3] [checkSum=718014124] [2024/05/04 22:17:32.159 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.finishmark] [checkSum=0] [2024/05/04 22:17:32.161 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.t1] [checkSum=718014124] [2024/05/04 22:17:32.161 +08:00] [INFO] [main.go:107] ["get checksum for the upstream success"] [elapsed=16.2717ms] [2024/05/04 22:17:32.164 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=3030946575] [2024/05/04 22:17:32.166 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t2] [checkSum=718014124] [2024/05/04 22:17:32.169 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t3] [checkSum=718014124] [2024/05/04 22:17:32.175 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.finishmark] [checkSum=0] [2024/05/04 22:17:32.176 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.t1] [checkSum=718014124] [2024/05/04 22:17:32.177 +08:00] [INFO] [main.go:116] ["get checksum for the downstream success"] [elapsed=15.369965ms] [2024/05/04 22:17:32.177 +08:00] [INFO] [main.go:95] ["compare checksum passed"] wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 22:17:33 CST 2024] <<<<<< run test case kafka_column_selector success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:17:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6878dc40-91f4-4ca9-8848-7fdf666aa748 {"id":"6878dc40-91f4-4ca9-8848-7fdf666aa748","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832259} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f78015cc 6878dc40-91f4-4ca9-8848-7fdf666aa748 /tidb/cdc/default/default/upstream/7365148411467301496 {"id":7365148411467301496,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6878dc40-91f4-4ca9-8848-7fdf666aa748 {"id":"6878dc40-91f4-4ca9-8848-7fdf666aa748","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832259} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f78015cc 6878dc40-91f4-4ca9-8848-7fdf666aa748 /tidb/cdc/default/default/upstream/7365148411467301496 {"id":7365148411467301496,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6878dc40-91f4-4ca9-8848-7fdf666aa748 {"id":"6878dc40-91f4-4ca9-8848-7fdf666aa748","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832259} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f78015cc 6878dc40-91f4-4ca9-8848-7fdf666aa748 /tidb/cdc/default/default/upstream/7365148411467301496 {"id":7365148411467301496,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16005.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449532988650160130 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532988650160130 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16044.out cli changefeed create --start-ts=449532988650160130 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple&encoding-format=avro' -c simple-handle-key-only-avro --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml Create changefeed successfully! ID: simple-handle-key-only-avro Info: {"upstream_id":7365148411467301496,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple\u0026encoding-format=avro","create_time":"2024-05-04T22:17:44.481226205+08:00","start_ts":449532988650160130,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532988650160130,"checkpoint_ts":449532988650160130,"checkpoint_time":"2024-05-04 22:17:42.612"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... valid ~~~ running cdc Failed to start cdc, the usage tips should be printed 1st test case cdc_server_tips success! 
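The start-ts fed into the changefeed create above comes from a PD TSO query; because the coverage-instrumented cdc.test binary appends a "PASS coverage" summary to stdout, the scripts keep only the first whitespace-separated field. Condensed from the traces (coverage wrapper omitted):

tso=$(cdc cli tso query --pd=http://127.0.0.1:2379)
# output looks like: "449532988650160130 PASS coverage: 1.8% of statements ..."
start_ts=$(echo "$tso" | awk -F ' ' '{print $1}')
echo "using start-ts ${start_ts}"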
try an INVALID cdc server command [Sat May 4 22:17:42 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2587425876.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default --pd None + [[ true != \n\o ]] + set +x /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh: line 1: 17149 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 (wd: /tmp/tidb_cdc_test/kafka_column_selector) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/run.sh using Sink-Type: kafka... <<================= Starting schema registry... * About to connect() to 127.0.0.1 port 8088 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8088; Connection refused * Closing connection 0 + set +x * About to connect() to 127.0.0.1 port 8088 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8088; Connection refused * Closing connection 0 * About to connect() to 127.0.0.1 port 8088 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8088 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8088 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:17:49 GMT < Content-Type: application/vnd.schemaregistry.v1+json < Vary: Accept-Encoding, User-Agent < Content-Length: 2 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 49 100 24 100 25 381 397 --:--:-- --:--:-- --:--:-- 403 {"compatibility":"NONE"}The 1 times to try to start tidb cluster... + run_case_with_unavailable_tikv conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory The 1 times to try to start tidb cluster... 
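The kafka_column_selector_avro case waits for the schema registry on 127.0.0.1:8088 and then relaxes its compatibility checks; the {"compatibility":"NONE"} body above is consistent with a PUT to the registry's global /config endpoint (standard Confluent Schema Registry API; the exact call the script makes is an assumption):

# wait until the schema registry answers on port 8088
while ! curl -sf http://127.0.0.1:8088/ >/dev/null; do
    sleep 2
done
# disable compatibility checks for the test's subjects (assumed to be the request that
# produced the {"compatibility":"NONE"} response shown above)
curl -s -X PUT -H 'Content-Type: application/vnd.schemaregistry.v1+json' \
    --data '{"compatibility": "NONE"}' http://127.0.0.1:8088/config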
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16112.out cli changefeed pause -c simple-handle-key-only-avro PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector_avro Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16140.out cli changefeed update -c simple-handle-key-only-avro '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple&encoding-format=avro&max-message-bytes=650' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml --no-confirm Verifying downstream PD is started... Diff of changefeed config: {Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple&encoding-format=avro To:kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple&encoding-format=avro&max-message-bytes=650} {Type:update Path:[Config SyncPointInterval] From: To:0xc003a2a578} {Type:update Path:[Config SyncPointRetention] From: To:0xc003a2a588} {Type:update Path:[Config Consistent] From: To:0xc000a4c230} Update changefeed config successfully! 
ID: simple-handle-key-only-avro Info: {"upstream_id":7365148411467301496,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple\u0026encoding-format=avro\u0026max-message-bytes=650","create_time":"2024-05-04T22:17:44.481226205+08:00","start_ts":449532988650160130,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":0,"checkpoint_ts":449532991088099332,"checkpoint_time":"2024-05-04 22:17:51.912"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16171.out cli changefeed resume -c simple-handle-key-only-avro Verifying downstream PD is started... PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x table test.finish_mark not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) invalid ~~~ running cdc Failed to start cdc, the usage tips should be printed 2nd test case cdc_server_tips success! [Sat May 4 22:18:02 CST 2024] <<<<<< run all test cases cdc_server_tips success! 
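[editor's note] The kafka_simple_handle_key_only_avro steps interleaved above follow a pause, update, resume sequence against one changefeed. A condensed sketch of that flow: the changefeed ID and sink URI are copied from the log; "cdc" stands in for the coverage-instrumented cdc.test binary, and the config path is shortened from the full workspace path.

  CF=simple-handle-key-only-avro
  cdc cli changefeed pause -c "$CF"
  # Update the sink URI (here adding max-message-bytes) without an interactive prompt.
  cdc cli changefeed update -c "$CF" \
      --sink-uri='kafka://127.0.0.1:9092/simple-handle-key-only-avro-22772?protocol=simple&encoding-format=avro&max-message-bytes=650' \
      --config=conf/changefeed.toml --no-confirm
  cdc cli changefeed resume -c "$CF"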
>>>>>> table test.finish_mark exists check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe05a90000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:30422, start at 2024-05-04 22:18:03.30990537 +0800 CST m=+5.130066483 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:03.318 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:03.300 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:03.300 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe05a90000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:30422, start at 2024-05-04 22:18:03.30990537 +0800 CST m=+5.130066483 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:03.318 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:03.300 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:03.300 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe05c200013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:30505, start at 2024-05-04 22:18:03.426240354 +0800 CST m=+5.192000794 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:03.434 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:03.400 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:03.400 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 2-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe075e4000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:13468, start at 2024-05-04 22:18:05.061860936 +0800 CST m=+5.236559252 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:05.069 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:05.049 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:05.049 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 22:18:06 CST 2024] <<<<<< START cdc server in kafka_column_selector_avro case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector_avro.3198431986.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector_avro/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 3-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe075e4000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:13468, start at 2024-05-04 22:18:05.061860936 +0800 CST m=+5.236559252 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:05.069 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:05.049 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:05.049 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe076940015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fqx8d-vmvsc, pid:13555, start at 2024-05-04 22:18:05.127124346 +0800 CST m=+5.236052650 Host name and pid of current GC leader. 
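[editor's note] The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the GC bookkeeping rows that TiDB keeps in the mysql.tidb table; the harness presumably selects them while verifying that each TiDB instance has bootstrapped. A sketch of an equivalent manual check, assuming the default TiDB port 4000 (the actual port is not shown in the log):

  # Inspect the tikv_gc_* bookkeeping rows for a running TiDB instance.
  mysql -h 127.0.0.1 -P 4000 -u root -e \
      'SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE "tikv_gc%"'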
(DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:05.133 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:05.093 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:05.093 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 4-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:18:09 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c58ce6b3-3210-4ec9-b010-4b8bac075ceb {"id":"c58ce6b3-3210-4ec9-b010-4b8bac075ceb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832286} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7ee96ca c58ce6b3-3210-4ec9-b010-4b8bac075ceb /tidb/cdc/default/default/upstream/7365148533045328030 {"id":7365148533045328030,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c58ce6b3-3210-4ec9-b010-4b8bac075ceb {"id":"c58ce6b3-3210-4ec9-b010-4b8bac075ceb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832286} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7ee96ca c58ce6b3-3210-4ec9-b010-4b8bac075ceb /tidb/cdc/default/default/upstream/7365148533045328030 {"id":7365148533045328030,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c58ce6b3-3210-4ec9-b010-4b8bac075ceb {"id":"c58ce6b3-3210-4ec9-b010-4b8bac075ceb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832286} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7ee96ca c58ce6b3-3210-4ec9-b010-4b8bac075ceb /tidb/cdc/default/default/upstream/7365148533045328030 {"id":7365148533045328030,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector_avro.cli.32041.out cli changefeed create --start-ts=449532994925625345 '--sink-uri=kafka://127.0.0.1:9092/column-selector-avro-test?protocol=avro&enable-tidb-extension=true&avro-enable-watermark=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/conf/changefeed.toml --schema-registry=http://127.0.0.1:8088 Create changefeed successfully! 
ID: test Info: {"upstream_id":7365148533045328030,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-avro-test?protocol=avro\u0026enable-tidb-extension=true\u0026avro-enable-watermark=true","create_time":"2024-05-04T22:18:10.168997932+08:00","start_ts":449532994925625345,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"avro","schema_registry":"http://127.0.0.1:8088","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.*"],"columns":["*","!b"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532994925625345,"checkpoint_ts":449532994925625345,"checkpoint_time":"2024-05-04 22:18:06.551"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15013.out cli tso query --pd=http://127.0.0.1:2379 + set +x [Sat May 4 22:18:11 CST 2024] <<<<<< START kafka consumer in kafka_column_selector_avro case >>>>>> consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/conf/changefeed.toml schema registry uri found: http://127.0.0.1:8088 + set +x + tso='449532995960569857 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449532995960569857 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x + start_ts=449532995960569857 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test [Sat May 4 22:18:11 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1505015052.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 5-th time, retry later check diff failed 6-th time, retry later Starting build checksum checker... table test.finishmark not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
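[editor's note] The start_ts handling above queries a TSO from PD and keeps only the first field, because the coverage-instrumented binary also prints PASS/coverage lines after the timestamp. A short sketch of that extraction ("cdc" again stands in for cdc.test):

  # Fetch a TSO from PD; strip the trailing PASS/coverage output down to the timestamp.
  start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk '{print $1}')
  echo "using start-ts=$start_ts"   # later passed as --start-ts to "changefeed create"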
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:18:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2c787d62-5cf2-4a71-9960-ba406fdee52a {"id":"2c787d62-5cf2-4a71-9960-ba406fdee52a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832292} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7f6f9d8 2c787d62-5cf2-4a71-9960-ba406fdee52a /tidb/cdc/default/default/upstream/7365148542639442321 {"id":7365148542639442321,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2c787d62-5cf2-4a71-9960-ba406fdee52a {"id":"2c787d62-5cf2-4a71-9960-ba406fdee52a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832292} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7f6f9d8 2c787d62-5cf2-4a71-9960-ba406fdee52a /tidb/cdc/default/default/upstream/7365148542639442321 {"id":7365148542639442321,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2c787d62-5cf2-4a71-9960-ba406fdee52a {"id":"2c787d62-5cf2-4a71-9960-ba406fdee52a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832292} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f7f6f9d8 2c787d62-5cf2-4a71-9960-ba406fdee52a /tidb/cdc/default/default/upstream/7365148542639442321 {"id":7365148542639442321,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed-redo.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449532995960569857 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15113.out cli changefeed create --start-ts=449532995960569857 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml Create changefeed successfully! ID: test-1 Info: {"upstream_id":7365148542639442321,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-04T22:18:15.496055644+08:00","start_ts":449532995960569857,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449532995960569857,"checkpoint_ts":449532995960569857,"checkpoint_time":"2024-05-04 22:18:10.499"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... check diff failed 7-th time, retry later table test.finishmark not exists for 2-th check, retry later + set +x + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later table test.finishmark exists [2024/05/04 22:18:17.807 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.finishmark] [checkSum=0] [2024/05/04 22:18:17.810 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=718014124] [2024/05/04 22:18:17.810 +08:00] [INFO] [main.go:107] ["get checksum for the upstream success"] [elapsed=9.251134ms] [2024/05/04 22:18:17.816 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.finishmark] [checkSum=0] [2024/05/04 22:18:17.818 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=718014124] [2024/05/04 22:18:17.818 +08:00] [INFO] [main.go:116] ["get checksum for the downstream success"] [elapsed=8.335128ms] [2024/05/04 22:18:17.818 +08:00] [INFO] [main.go:95] ["compare checksum passed"] wait process cdc.test exit for 1-th time... 
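[editor's note] The test-1 changefeed created above enables redo logging: its Info JSON shows consistent level "eventual" with a file-backed storage path, plus synced_check_interval 120 and checkpoint_interval 20. The conf/changefeed-redo.toml file itself is not printed; a minimal sketch of the consistent section it presumably contains (key names from the public changefeed config format; the synced-status thresholds are omitted here since their exact TOML spelling is not visible in the log):

  # Hypothetical reconstruction of the redo portion of changefeed-redo.toml.
  cat > conf/changefeed-redo.toml <<'EOF'
  [consistent]
  level = "eventual"
  storage = "file:///tmp/tidb_cdc_test/synced_status/redo"
  EOF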
[2024/05/04 22:18:16.049 +08:00] [WARN] [diff.go:745] ["rows is not equal"] [table=`test`.`ntest`] [where="((TRUE) AND TRUE)"] [cost=1m35.075379978s] [2024/05/04 22:18:16.051 +08:00] [WARN] [diff.go:384] ["check chunk data not equal"] [chunk="{\"id\":0,\"bounds\":[],\"where\":\"((TRUE) AND TRUE)\",\"args\":null,\"state\":\"failed\"}"] check diff failed 8-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/event_filter Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... table test.t1 exists + sleep 5 cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:18:19 CST 2024] <<<<<< run test case kafka_column_selector_avro success! >>>>>> check diff failed 9-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 10-th time, retry later [2024/05/04 22:18:22.070 +08:00] [INFO] [dailytest.go:68] ["test pass!!!"] Starting Upstream TiDB... wait process cdc.test exit for 1-th time... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 22:18:24 CST 2024] <<<<<< run test case cdc success! >>>>>> check diff failed 11-th time, retry later + kill_tikv ++ ps aux ++ grep tikv-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo + info='jenkins 12826 22.5 0.5 4691496 2204980 ? 
Sl 22:17 0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20160 --status-addr 127.0.0.1:20181 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv1.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv1 jenkins 12827 30.2 0.5 4727332 2274916 ? Sl 22:17 0:07 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20161 --status-addr 127.0.0.1:20182 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv2.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv2 jenkins 12828 22.4 0.5 4694052 2221836 ? Sl 22:17 0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20162 --status-addr 127.0.0.1:20183 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv3.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv3 jenkins 12831 29.2 0.5 4722212 2264416 ? Sl 22:17 0:07 tikv-server --pd 127.0.0.1:2479 -A 127.0.0.1:21160 --status-addr 127.0.0.1:21180 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down' ++ ps aux ++ grep tikv-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo ++ xargs kill -9 ++ awk '{print $2}' ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 1856 0 --:--:-- --:--:-- --:--:-- 1869 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-04 22:18:22.849","puller_resolved_ts":"2024-05-04 22:18:16.949","last_synced_ts":"2024-05-04 22:18:16.999","now_ts":"2024-05-04 22:18:24.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:18:22.849","puller_resolved_ts":"2024-05-04' '22:18:16.949","last_synced_ts":"2024-05-04' '22:18:16.999","now_ts":"2024-05-04' '22:18:24.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '22:18:22.849","puller_resolved_ts":"2024-05-04' '22:18:16.949","last_synced_ts":"2024-05-04' '22:18:16.999","now_ts":"2024-05-04' '22:18:24.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + target_message='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 12-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 13-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe1de08000f Current GC worker leader UUID. 
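[editor's note] After killing the upstream TiKV processes, the test polls the v2 synced-status API and parses the response with jq, as traced above. The essential steps, reduced to a sketch (endpoint and field names copied from the trace; the echo is illustrative):

  # Query the synced status of changefeed test-1 and pull out the flag and hint.
  resp=$(curl -sX GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
  synced=$(echo "$resp" | jq .synced)    # boolean: has the changefeed fully caught up?
  info=$(echo "$resp" | jq -r .info)     # human-readable explanation, e.g. "The data syncing is not finished, please wait"
  if [ "$synced" != "true" ]; then
      echo "not synced yet: $info"
  fi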
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:27034, start at 2024-05-04 22:18:28.11490942 +0800 CST m=+5.223565563 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:28.121 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:28.098 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:28.098 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 14-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe1de08000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:27034, start at 2024-05-04 22:18:28.11490942 +0800 CST m=+5.223565563 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:28.121 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:28.098 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:28.098 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe1e0440014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:27124, start at 2024-05-04 22:18:28.26423656 +0800 CST m=+5.315317551 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:28.270 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:28.241 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:28.241 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/event_filter/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/event_filter/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/debezium/run.sh using Sink-Type: kafka... <<================= check diff failed 15-th time, retry later [Sat May 4 22:18:33 CST 2024] <<<<<< START cdc server in event_filter case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.2846428466.out server --log-file /tmp/tidb_cdc_test/event_filter/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/event_filter/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 16-th time, retry later % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 1415 100 678 100 737 1462 1589 --:--:-- --:--:-- --:--:-- 1588 100 1415 100 678 100 737 1462 1589 --:--:-- --:--:-- --:--:-- 1588 HTTP/1.1 201 Created Date: Sat, 04 May 2024 14:18:34 GMT Location: http://localhost:8083/connectors/my-connector Content-Type: application/json Content-Length: 678 Server: Jetty(9.4.51.v20230217) {"name":"my-connector","config":{"connector.class":"io.debezium.connector.mysql.MySqlConnector","tasks.max":"1","database.hostname":"127.0.0.1","database.port":"3310","database.user":"debezium","database.password":"dbz","database.server.id":"184054","topic.prefix":"dbserver1","schema.history.internal.kafka.bootstrap.servers":"127.0.0.1:9092","schema.history.internal.kafka.topic":"schemahistory.test","transforms":"x","transforms.x.type":"org.apache.kafka.connect.transforms.RegexRouter","transforms.x.regex":"(.*)","transforms.x.replacement":"output_debezium","binary.handling.mode":"base64","decimal.handling.mode":"double","name":"my-connector"},"tasks":[],"type":"source"}The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:18:36 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/676cc675-fe4c-403d-bd34-215cbde3eed5 {"id":"676cc675-fe4c-403d-bd34-215cbde3eed5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832313} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f85135cc 676cc675-fe4c-403d-bd34-215cbde3eed5 /tidb/cdc/default/default/upstream/7365148646301314741 {"id":7365148646301314741,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/676cc675-fe4c-403d-bd34-215cbde3eed5 {"id":"676cc675-fe4c-403d-bd34-215cbde3eed5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832313} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f85135cc 676cc675-fe4c-403d-bd34-215cbde3eed5 /tidb/cdc/default/default/upstream/7365148646301314741 
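[editor's note] The "HTTP/1.1 201 Created" block above is Kafka Connect acknowledging registration of the Debezium MySQL connector; only the response is printed. A sketch of the conventional registration request, with the endpoint taken from the Location header and every config field copied from the echoed response body (the request shape follows the standard Kafka Connect REST API, not something shown verbatim in this log):

  curl -s -X POST -H 'Content-Type: application/json' http://localhost:8083/connectors -d '{
    "name": "my-connector",
    "config": {
      "connector.class": "io.debezium.connector.mysql.MySqlConnector",
      "tasks.max": "1",
      "database.hostname": "127.0.0.1",
      "database.port": "3310",
      "database.user": "debezium",
      "database.password": "dbz",
      "database.server.id": "184054",
      "topic.prefix": "dbserver1",
      "schema.history.internal.kafka.bootstrap.servers": "127.0.0.1:9092",
      "schema.history.internal.kafka.topic": "schemahistory.test",
      "transforms": "x",
      "transforms.x.type": "org.apache.kafka.connect.transforms.RegexRouter",
      "transforms.x.regex": "(.*)",
      "transforms.x.replacement": "output_debezium",
      "binary.handling.mode": "base64",
      "decimal.handling.mode": "double"
    }
  }'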
{"id":7365148646301314741,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/676cc675-fe4c-403d-bd34-215cbde3eed5 {"id":"676cc675-fe4c-403d-bd34-215cbde3eed5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832313} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f85135cc 676cc675-fe4c-403d-bd34-215cbde3eed5 /tidb/cdc/default/default/upstream/7365148646301314741 {"id":7365148646301314741,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.cli.28520.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-event-filter-9651?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8300 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/conf/cf.toml Create changefeed successfully! ID: a7edad64-4192-4fd3-84d7-faad31282a2c Info: {"upstream_id":7365148646301314741,"namespace":"default","id":"a7edad64-4192-4fd3-84d7-faad31282a2c","sink_uri":"kafka://127.0.0.1:9092/ticdc-event-filter-9651?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:18:36.766688768+08:00","start_ts":449533002815373315,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["event_filter.*"],"event_filters":[{"matcher":["event_filter.t1"],"ignore_event":["drop table","delete"],"ignore_sql":null,"ignore_insert_value_expr":"id = 2 or city = 'tokyo'","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_truncate"],"ignore_event":["truncate table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_alter"],"ignore_event":["alter 
table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""}]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449533002815373315,"checkpoint_ts":449533002815373315,"checkpoint_time":"2024-05-04 22:18:36.648"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... check diff failed 17-th time, retry later + set +x [Sat May 4 22:18:38 CST 2024] <<<<<< START kafka consumer in event_filter case >>>>>> check diff failed 18-th time, retry later table event_filter.t1 does not exists table event_filter.t1 not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/debezium Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table event_filter.t1 exists table event_filter.t_normal not exists for 1-th check, retry later check diff failed 19-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/split_region/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 20-th time, retry later table event_filter.t_normal exists table event_filter.t_truncate not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/split_region Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 21-th time, retry later table event_filter.t_truncate exists table event_filter.t_alter not exists for 1-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table event_filter.t_alter exists ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 22-th time, retry later table event_filter.finish_mark exists check diff failed 1-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
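The repeated "table ... not exists for N-th check, retry later" lines come from a polling helper in the integration test scripts that waits for a table to show up in the downstream database. A minimal sketch of that pattern, assuming an illustrative downstream host/port (the real helper lives in tiflow's test utilities):

  # illustrative poll: wait until event_filter.t1 exists downstream, up to 60 attempts
  for i in $(seq 1 60); do
      if mysql -h 127.0.0.1 -P 3306 -u root -e 'DESC event_filter.t1;' >/dev/null 2>&1; then
          echo "table event_filter.t1 exists"
          break
      fi
      echo "table event_filter.t1 not exists for ${i}-th check, retry later"
      sleep 2
  done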
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 23-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 2-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 24-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe337900014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:33259, start at 2024-05-04 22:18:50.251177568 +0800 CST m=+5.193602171 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:50.258 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:50.261 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:50.261 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe337900014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:33259, start at 2024-05-04 22:18:50.251177568 +0800 CST m=+5.193602171 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:50.258 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:50.261 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:50.261 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe33840000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-6xzf3-06nx6, pid:33328, start at 2024-05-04 22:18:50.273931887 +0800 CST m=+5.166103268 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:50.280 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:50.256 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:50.256 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Logging trace to /tmp/tidb_cdc_test/debezium/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/debezium/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/debezium/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/debezium/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/debezium/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 2-th time... check diff failed 25-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... 
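The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dump above is the contents of TiDB's mysql.tidb system table; the test scripts read it to confirm each TiDB instance has bootstrapped and that GC is configured before CDC is started. A sketch of the equivalent manual check (host and port are placeholders for this test's upstream TiDB):

  # list the GC-related rows of the mysql.tidb bootstrap table
  mysql -h 127.0.0.1 -P 4000 -u root -e \
    "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"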
process cdc.test already exit [Sat May 4 22:18:54 CST 2024] <<<<<< run test case event_filter success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe37154001a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:25067, start at 2024-05-04 22:18:53.948637286 +0800 CST m=+5.213828506 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:53.957 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:53.959 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:53.959 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe37154001a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:25067, start at 2024-05-04 22:18:53.948637286 +0800 CST m=+5.213828506 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:53.957 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:53.959 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:53.959 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0fe372c40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-fchds-2rh3m, pid:25139, start at 2024-05-04 22:18:54.031514672 +0800 CST m=+5.243505172 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-22:20:54.040 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-22:18:54.001 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-22:08:54.001 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/split_region/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/split_region/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/split_region/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/split_region/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/split_region/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 22:18:55 CST 2024] <<<<<< START cdc server in debezium case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.debezium.3475934761.out server --log-file /tmp/tidb_cdc_test/debezium/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/debezium/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 26-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.cli.26475.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 27-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:18:58 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/703b180a-a226-4126-9fe0-ae8a75f141e7 {"id":"703b180a-a226-4126-9fe0-ae8a75f141e7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832335} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8a1eece 703b180a-a226-4126-9fe0-ae8a75f141e7 /tidb/cdc/default/default/upstream/7365148739706012508 {"id":7365148739706012508,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/703b180a-a226-4126-9fe0-ae8a75f141e7 {"id":"703b180a-a226-4126-9fe0-ae8a75f141e7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832335} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8a1eece 703b180a-a226-4126-9fe0-ae8a75f141e7 /tidb/cdc/default/default/upstream/7365148739706012508 {"id":7365148739706012508,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/703b180a-a226-4126-9fe0-ae8a75f141e7 {"id":"703b180a-a226-4126-9fe0-ae8a75f141e7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832335} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8a1eece 703b180a-a226-4126-9fe0-ae8a75f141e7 /tidb/cdc/default/default/upstream/7365148739706012508 
{"id":7365148739706012508,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.debezium.cli.34814.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/output_ticdc?protocol=debezium&kafka-version=2.4.0' + set +x + tso='449533008244637697 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449533008244637697 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x Create changefeed successfully! ID: 4bd31a05-efbc-4f29-8c0d-a87f7e4cd6ed Info: {"upstream_id":7365148739706012508,"namespace":"default","id":"4bd31a05-efbc-4f29-8c0d-a87f7e4cd6ed","sink_uri":"kafka://127.0.0.1:9092/output_ticdc?protocol=debezium\u0026kafka-version=2.4.0","create_time":"2024-05-04T22:18:58.909472491+08:00","start_ts":449533008612425733,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"debezium","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449533008612425733,"checkpoint_ts":449533008612425733,"checkpoint_time":"2024-05-04 22:18:58.762"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 28-th time, retry later [Sat May 4 22:19:00 CST 2024] <<<<<< START cdc server in split_region case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.2653026532.out server --log-file /tmp/tidb_cdc_test/split_region/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/split_region/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20231116213047-1f7c1e02bcd4 go: downloading go.uber.org/zap v1.26.0 go: downloading github.com/thessem/zap-prettyconsole v0.3.0 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20231117065153-a4f85c356873 go: downloading github.com/segmentio/kafka-go v0.4.45 go: downloading github.com/google/go-cmp v0.6.0 go: downloading github.com/alecthomas/chroma v0.10.0 go: downloading github.com/google/uuid v1.3.1 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading golang.org/x/sys v0.14.0 go: downloading github.com/Code-Hex/dd v1.1.0 go: downloading github.com/pierrec/lz4/v4 v4.1.15 go: downloading github.com/klauspost/compress v1.17.1 go: downloading github.com/dlclark/regexp2 v1.4.0 check diff failed 29-th time, retry later go: downloading github.com/pingcap/errors v0.11.5-0.20221009092201-b66cddb77c32 go: downloading github.com/pingcap/log v1.1.1-0.20230317032135-a0d097d16e22 go: downloading golang.org/x/exp v0.0.0-20231006140011-7918f672742d go: downloading github.com/pingcap/sysutil v1.0.1-0.20230407040306-fb007c5aff21 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20231114060955-8fc8a528217e go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.3.0 go: downloading github.com/prometheus/client_golang v1.17.0 go: downloading github.com/golang/protobuf v1.5.3 go: downloading github.com/prometheus/client_model v0.5.0 go: downloading github.com/cockroachdb/errors v1.8.1 go: downloading github.com/shirou/gopsutil/v3 v3.23.10 go: downloading github.com/pingcap/kvproto v0.0.0-20230925123611-87bebcc0d071 go: downloading google.golang.org/protobuf v1.31.0 go: downloading google.golang.org/grpc v1.59.0 go: downloading github.com/prometheus/common v0.45.0 go: downloading github.com/cespare/xxhash/v2 v2.2.0 go: downloading github.com/prometheus/procfs v0.12.0 go: downloading github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2 go: downloading github.com/cockroachdb/redact v1.0.8 go: downloading github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f go: downloading github.com/rogpeppe/go-internal v1.11.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.10 go: downloading golang.org/x/sync v0.4.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.10 go: downloading github.com/tikv/pd/client v0.0.0-20231114041114-86831ce71865 go: downloading github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.10 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b go: downloading google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b go: downloading golang.org/x/net v0.18.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b check diff failed 30-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
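The curl traces surrounding this point come from a readiness loop that polls the new cdc server's /debug/info endpoint until the response contains "etcd info", retrying up to 50 times with a 3-second pause (the '[' 0 -eq 50 ']' and sleep 3 steps above are that loop). Stripped of tracing and coverage noise, the loop is approximately:

  # approximate shape of the probe loop visible in the trace; the probe strings match the log
  for i in $(seq 0 50); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
      echo "$res" | grep -q 'failed to get info:' && { echo "cdc server reported an error"; exit 1; }
      echo "$res" | grep -q 'etcd info' && break
      [ "$i" -eq 50 ] && { echo "cdc server failed to start in time"; exit 1; }
      sleep 3
  done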
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 14:19:03 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/998165a2-6f44-4977-bdd6-f783aeaf45b2 {"id":"998165a2-6f44-4977-bdd6-f783aeaf45b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832340} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8b3b0ff 998165a2-6f44-4977-bdd6-f783aeaf45b2 /tidb/cdc/default/default/upstream/7365148750263858544 {"id":7365148750263858544,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/998165a2-6f44-4977-bdd6-f783aeaf45b2 {"id":"998165a2-6f44-4977-bdd6-f783aeaf45b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832340} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8b3b0ff 998165a2-6f44-4977-bdd6-f783aeaf45b2 /tidb/cdc/default/default/upstream/7365148750263858544 {"id":7365148750263858544,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/998165a2-6f44-4977-bdd6-f783aeaf45b2 {"id":"998165a2-6f44-4977-bdd6-f783aeaf45b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-52-g6a342866d","git-hash":"6a342866deda3271b067f649c64b771bbe3d2a00","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714832340} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43f8b3b0ff 998165a2-6f44-4977-bdd6-f783aeaf45b2 /tidb/cdc/default/default/upstream/7365148750263858544 {"id":7365148750263858544,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.cli.26588.out cli changefeed create --start-ts=449533008244637697 '--sink-uri=kafka://127.0.0.1:9092/ticdc-split-region-test-4402?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/split_region/pulsar_test.toml Create changefeed successfully! 
ID: 115a7b44-cee1-4027-ac51-2f8dccaf8d3c Info: {"upstream_id":7365148750263858544,"namespace":"default","id":"115a7b44-cee1-4027-ac51-2f8dccaf8d3c","sink_uri":"kafka://127.0.0.1:9092/ticdc-split-region-test-4402?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T22:19:03.916628716+08:00","start_ts":449533008244637697,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-52-g6a342866d","resolved_ts":449533008244637697,"checkpoint_ts":449533008244637697,"checkpoint_time":"2024-05-04 22:18:57.359"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 22:19:05 CST 2024] <<<<<< START kafka consumer in split_region case >>>>>> table split_region.test1 not exists for 1-th check, retry later check diff failed at last A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... 
failure
_____________________________________________________________________________
Progress [============================================================>] 100% 0/0
Progress [============================================================>] 100% 0/0
The data of `test`.`t` is not equal
The rest of tables are all equal.
A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped.
The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/'
You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log'
A total of 2 tables need to be compared
Comparing the table structure of ``test`.`t`` ...
equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... 
_____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... 
_____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... 
failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. 
A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' cat: /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff/output/sync_diff.log: No such file or directory table split_region.test1 not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh using Sink-Type: kafka... 
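The sync_diff_inspector output above repeats because the test harness re-runs the checker until `test`.`t` matches on both ends or the retry budget runs out; each run reports 1 failed table and writes fix SQL under fix-on-tidb0/. For reference, the sketch below shows the general shape of a v2 sync-diff-inspector configuration that would drive such a comparison. The section and key names follow the tool's documented TOML layout, but the instance names, ports, and output directory here are assumptions inferred from the paths printed in this log, not the actual file the kafka_simple_handle_key_only_avro case uses (the harness normally generates this file and wraps the run in its own retry helper).

  # Sketch only: compare test.* between the upstream TiDB (4000) and the
  # downstream TiDB (3306), exporting fix SQL like the fix-on-tidb0/ output above.
  cat > /tmp/tidb_cdc_test/simple_handle_key_only_avro/sync_diff.toml <<'EOF'
  check-thread-count = 4
  export-fix-sql = true

  [data-sources]
  [data-sources.up_tidb]
  host = "127.0.0.1"
  port = 4000
  user = "root"

  [data-sources.tidb0]
  host = "127.0.0.1"
  port = 3306
  user = "root"

  [task]
  output-dir = "/tmp/tidb_cdc_test/simple_handle_key_only_avro/output"
  source-instances = ["up_tidb"]
  target-instance = "tidb0"
  target-check-tables = ["test.*"]
  EOF
  sync_diff_inspector --config=/tmp/tidb_cdc_test/simple_handle_key_only_avro/sync_diff.toml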
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh using Sink-Type: kafka... <<=================
+++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh
++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
++ pwd
+ CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
+ source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/../_utils/test_prepare
++ UP_TIDB_HOST=127.0.0.1
++ UP_TIDB_PORT=4000
++ UP_TIDB_OTHER_PORT=4001
++ UP_TIDB_STATUS=10080
++ UP_TIDB_OTHER_STATUS=10081
++ DOWN_TIDB_HOST=127.0.0.1
++ DOWN_TIDB_PORT=3306
++ DOWN_TIDB_STATUS=20080
++ TLS_TIDB_HOST=127.0.0.1
++ TLS_TIDB_PORT=3307
++ TLS_TIDB_STATUS=30080
++ UP_PD_HOST_1=127.0.0.1
++ UP_PD_PORT_1=2379
++ UP_PD_PEER_PORT_1=2380
++ UP_PD_HOST_2=127.0.0.1
++ UP_PD_PORT_2=2679
++ UP_PD_PEER_PORT_2=2680
++ UP_PD_HOST_3=127.0.0.1
++ UP_PD_PORT_3=2779
++ UP_PD_PEER_PORT_3=2780
++ DOWN_PD_HOST=127.0.0.1
++ DOWN_PD_PORT=2479
++ DOWN_PD_PEER_PORT=2480
++ TLS_PD_HOST=127.0.0.1
++ TLS_PD_PORT=2579
++ TLS_PD_PEER_PORT=2580
++ UP_TIKV_HOST_1=127.0.0.1
++ UP_TIKV_PORT_1=20160
++ UP_TIKV_STATUS_PORT_1=20181
++ UP_TIKV_HOST_2=127.0.0.1
++ UP_TIKV_PORT_2=20161
++ UP_TIKV_STATUS_PORT_2=20182
++ UP_TIKV_HOST_3=127.0.0.1
++ UP_TIKV_PORT_3=20162
++ UP_TIKV_STATUS_PORT_3=20183
++ DOWN_TIKV_HOST=127.0.0.1
++ DOWN_TIKV_PORT=21160
++ DOWN_TIKV_STATUS_PORT=21180
++ TLS_TIKV_HOST=127.0.0.1
++ TLS_TIKV_PORT=22160
++ TLS_TIKV_STATUS_PORT=22180
+++ cat /tmp/tidb_cdc_test/KAFKA_VERSION
+++ echo 2.4.1
++ KAFKA_VERSION=2.4.1
+ WORK_DIR=/tmp/tidb_cdc_test/sql_mode
+ CDC_BINARY=cdc.test
+ SINK_TYPE=kafka
+ CDC_COUNT=3
+ DB_COUNT=4
+ rm -rf /tmp/tidb_cdc_test/sql_mode
+ mkdir -p /tmp/tidb_cdc_test/sql_mode
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/sql_mode
The 1 times to try to start tidb cluster...
table split_region.test1 exists
table split_region.test2 exists
check diff failed 1-th time, retry later
check diff successfully
start tidb cluster in /tmp/tidb_cdc_test/sql_mode
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time: 2024-04-30 02:23:51
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
Starting Downstream TiKV...
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time: 2024-04-30 02:23:51
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
Starting Upstream TiDB...
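The `+`/`++` lines above are bash xtrace output from the sql_mode case's run.sh as it sources tests/integration_tests/_utils/test_prepare and boots a fresh cluster. As a rough sketch of that pattern (start_tidb_cluster, the exported port variables, and the work-directory layout are taken from the trace above; the rest is illustrative and not the actual script):

  #!/bin/bash
  # Sketch of the run.sh skeleton implied by the trace above (not the real file).
  set -eu

  CUR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
  source "$CUR/../_utils/test_prepare"   # exports UP_TIDB_PORT=4000, DOWN_TIDB_PORT=3306, PD/TiKV ports, etc.

  WORK_DIR=/tmp/tidb_cdc_test/sql_mode
  CDC_BINARY=cdc.test
  SINK_TYPE=${1:-kafka}

  rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
  start_tidb_cluster --workdir "$WORK_DIR"   # brings up PD, TiKV, TiDB and TiFlash, producing the startup banners in this log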
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh: line 1: 16210 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
Post stage
[Pipeline] sh
+ ls /tmp/tidb_cdc_test/
cli_tls_with_auth cov.cli_tls_with_auth.36613663.out cov.cli_tls_with_auth.cli.3598.out cov.cli_tls_with_auth.cli.3719.out cov.cli_tls_with_auth.cli.3982.out cov.cli_tls_with_auth.cli.4067.out cov.cli_tls_with_auth.cli.4106.out cov.cli_tls_with_auth.cli.4199.out cov.cli_tls_with_auth.cli.4281.out cov.cli_tls_with_auth.cli.4391.out cov.cli_tls_with_auth.cli.4429.out cov.cli_tls_with_auth.cli.4460.out cov.cli_tls_with_auth.cli.4572.out cov.cli_tls_with_auth.cli.4608.out cov.kafka_simple_basic.71857187.out cov.kafka_simple_basic.cli.7238.out cov.kafka_simple_basic.cli.7366.out cov.kafka_simple_basic.cli.7398.out cov.kafka_simple_basic_avro.1015310155.out cov.kafka_simple_basic_avro.cli.10214.out cov.kafka_simple_handle_key_only.1304813050.out cov.kafka_simple_handle_key_only.cli.13112.out cov.kafka_simple_handle_key_only.cli.13155.out cov.kafka_simple_handle_key_only.cli.13208.out cov.kafka_simple_handle_key_only.cli.13250.out cov.kafka_simple_handle_key_only.cli.13284.out cov.kafka_simple_handle_key_only_avro.cli.16005.out cov.kafka_simple_handle_key_only_avro.cli.16044.out cov.kafka_simple_handle_key_only_avro.cli.16112.out cov.kafka_simple_handle_key_only_avro.cli.16140.out cov.kafka_simple_handle_key_only_avro.cli.16171.out kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro simple_handle_key_only simple_handle_key_only_avro sql_res.cli_tls_with_auth.txt sql_res.kafka_simple_basic.txt sql_res.kafka_simple_basic_avro.txt sql_res.kafka_simple_handle_key_only.txt sql_res.kafka_simple_handle_key_only_avro.txt
++ find /tmp/tidb_cdc_test/ -type f -name '*.log'
+ tar -cvzf log-G01.tar.gz /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer.log
/tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down.log 
/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3/db/000005.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv3.log /tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log /tmp/tidb_cdc_test/cli_tls_with_auth/tikv1.log /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_cli_tls_with_auth_tls1.log /tmp/tidb_cdc_test/cli_tls_with_auth/pd1.log /tmp/tidb_cdc_test/cli_tls_with_auth/tikv2.log /tmp/tidb_cdc_test/cli_tls_with_auth/tidb_tls.log /tmp/tidb_cdc_test/cli_tls_with_auth/down_pd.log /tmp/tidb_cdc_test/cli_tls_with_auth/tidb_other.log /tmp/tidb_cdc_test/cli_tls_with_auth/stdout_cli_tls_with_auth_tls1.log /tmp/tidb_cdc_test/cli_tls_with_auth/tidb.log /tmp/tidb_cdc_test/cli_tls_with_auth/pd_tls.log /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_kafka_consumer.log /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/cli_tls_with_auth/pd2.log /tmp/tidb_cdc_test/cli_tls_with_auth/tidb_down.log /tmp/tidb_cdc_test/cli_tls_with_auth/tidb-slow.log /tmp/tidb_cdc_test/cli_tls_with_auth/tikv_down.log /tmp/tidb_cdc_test/cli_tls_with_auth/tikv3.log /tmp/tidb_cdc_test/cli_tls_with_auth/tikv_tls.log /tmp/tidb_cdc_test/cli_tls_with_auth/pd3.log tar: Removing leading `/' from member names /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv_down.log ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log 
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2.log check diff failed 1-th time, retry later /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3/db/000005.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer.log 
/tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer_stdout.log
/tmp/tidb_cdc_test/kafka_simple_basic/tidb_down.log
/tmp/tidb_cdc_test/kafka_simple_basic/tidb-slow.log
/tmp/tidb_cdc_test/kafka_simple_basic/tikv_down.log
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63d0fe540c80003 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:29830, start at 2024-05-04 22:19:23.571352878 +0800 CST m=+5.259650769 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240504-22:21:23.578 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240504-22:19:23.570 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240504-22:09:23.570 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
10:19PM INF > Info cdc.mysql=kafka://127.0.0.1:9092/output_debezium cdc.tidb=kafka://127.0.0.1:9092/output_ticdc db.mysql=root@tcp(127.0.0.1:3310)/{db}?allowNativePasswords=true db.tidb=root@tcp(127.0.0.1:4000)/{db}?allowNativePasswords=true
10:19PM INF > Run case=sql/data_types.sql
/tmp/tidb_cdc_test/kafka_simple_basic/tikv3.log
/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tikv1.log
/tmp/tidb_cdc_test/cli_tls_with_auth/cdc_cli_tls_with_auth_tls1.log
/tmp/tidb_cdc_test/cli_tls_with_auth/pd1.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tikv2.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tidb_tls.log
/tmp/tidb_cdc_test/cli_tls_with_auth/down_pd.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tidb_other.log
/tmp/tidb_cdc_test/cli_tls_with_auth/stdout_cli_tls_with_auth_tls1.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tidb.log
/tmp/tidb_cdc_test/cli_tls_with_auth/pd_tls.log
/tmp/tidb_cdc_test/cli_tls_with_auth/cdc_kafka_consumer.log
/tmp/tidb_cdc_test/cli_tls_with_auth/cdc_kafka_consumer_stdout.log
/tmp/tidb_cdc_test/cli_tls_with_auth/pd2.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tidb_down.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tidb-slow.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tikv_down.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tikv3.log
/tmp/tidb_cdc_test/cli_tls_with_auth/tikv_tls.log
/tmp/tidb_cdc_test/cli_tls_with_auth/pd3.log
+ ls -alh log-G01.tar.gz
-rw-r--r--. 1 jenkins jenkins 20M May 4 22:19 log-G01.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
check diff failed 2-th time, retry later
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63d0fe540c80003 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:29830, start at 2024-05-04 22:19:23.571352878 +0800 CST m=+5.259650769 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240504-22:21:23.578 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240504-22:19:23.570 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240504-22:09:23.570 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63d0fe543100014 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1853-5mh68-n9h3m, pid:29912, start at 2024-05-04 22:19:23.737264152 +0800 CST m=+5.368543756 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240504-22:21:23.746 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240504-22:19:23.716 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240504-22:09:23.716 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f
Edition: Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch: HEAD
UTC Build Time: 2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile: RELWITHDEBINFO
Compiler: clang++ 13.0.0
Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time: 2024-04-30 02:38:45
Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine: tiflash
Prometheus Prefix: tiflash_proxy_
Profile: release
Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
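The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above are the contents of TiDB's mysql.tidb bootstrap table, which the harness keeps dumping while it prints "Verifying ... TiDB is started..."; the ERROR 2003 lines are the failed attempts before the server is listening. A minimal sketch of the same readiness check (host and port taken from this log, the retry loop itself is illustrative):

  # Poll the upstream TiDB until it answers, then dump the bootstrap/GC variables shown above.
  for i in $(seq 1 60); do
      if mysql -h 127.0.0.1 -P 4000 -u root -e \
          'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'; then
          break
      fi
      sleep 1   # mysql exits non-zero (e.g. ERROR 2003) until TiDB is up
  done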
Logging trace to /tmp/tidb_cdc_test/sql_mode/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/sql_mode/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Recording fingerprints
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
check diff successfully
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G01'
Sending interrupt signal to process
Killing processes
wait process cdc.test exit for 1-th time...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
+ trap stop_tidb_cluster EXIT
+ run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 4000
+ run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 3306
+ cd /tmp/tidb_cdc_test/sql_mode
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.31369.out cli tso query --pd=http://127.0.0.1:2379
wait process cdc.test exit for 2-th time...
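In the trace above, the sql_mode case forces sql_mode=NO_BACKSLASH_ESCAPES on both the upstream (4000) and downstream (3306) databases and then queries a start TSO with `cdc cli tso query` before it is interrupted. The usual next step in these cases is to create the changefeed from that TSO; the sketch below uses real `cdc cli` flags, but the topic name and sink URI parameters here are placeholders, not the sql_mode case's actual configuration.

  # Sketch: create a Kafka changefeed starting from the queried TSO.
  start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379)
  cdc cli changefeed create \
      --pd=http://127.0.0.1:2379 \
      --start-ts="$start_ts" \
      --sink-uri="kafka://127.0.0.1:9092/ticdc-sql-mode-test?protocol=open-protocol&kafka-version=2.4.1"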
script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes kill finished with exit code 0 Sending interrupt signal to process Killing processes kill finished with exit code 0 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } ++ stop_tidb_cluster {"level":"warn","ts":1714832369.9128957,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002278700/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 [Pipeline] // dir [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] // withCredentials [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] // container [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] // node [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv ++ stop_tidb_cluster script returned exit code 143 [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // stage [Pipeline] // stage [Pipeline] // podTemplate [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G00' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G07' [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // stage ++ stop_tidb_cluster script returned exit code 143 [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G10' [Pipeline] // withCredentials [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // timeout [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G09' [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G08' [Pipeline] // parallel [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] End of Pipeline ERROR: script returned exit code 1 Finished: FAILURE
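The run ends with Finished: FAILURE after the G01 branch failed and the other TEST_GROUP branches were interrupted. The per-group logs were archived earlier as log-G01.tar.gz (with the leading '/' stripped by tar); a small sketch for digging into the failing sync_diff output from that artifact, where the grep pattern is only an example:

  # Sketch: unpack the archived group logs and inspect the sync_diff details.
  mkdir -p g01 && tar -xzf log-G01.tar.gz -C g01
  less g01/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log
  grep -rn "not equal" g01/tmp/tidb_cdc_test/*/sync_diff_inspector.log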