Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7fs23-d7r2q --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "4d45468b72cfeeb4b3baa42f549aac9eec36a168" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7fs23" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: 
"/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git 
+refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 5 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=82e33445-45b3-4da4-af87-2945d1d1be94 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 JENKINS_URL=https://do.pingcap.net/jenkins/ 
BUILD_ID=1786654730638856195 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7fs23 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305 NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7fs23 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build2277083544=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7fs23-49jpb bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 35428 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: 1858b87a1ad9f4ae (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 
20 IN A 20.205.243.166 ;; Query time: 0 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Sat May 04 07:11:25 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/model/kv.go, cdc/model/sink.go, cdc/model/sink_test.go, cdc/processor/processor.go, cdc/processor/sinkmanager/manager.go, cdc/processor/sourcemanager/manager.go, cdc/redo/reader/reader.go, cdc/sink/dmlsink/factory/factory.go, cdc/sink/dmlsink/txn/mysql/mysql.go, cdc/sink/dmlsink/txn/mysql/mysql_test.go, cmd/kafka-consumer/main.go, cmd/pulsar-consumer/main.go, cmd/storage-consumer/main.go, errors.toml, pkg/applier/redo.go, pkg/applier/redo_test.go, pkg/errors/cdc_errors.go, pkg/errors/helper.go, tests/integration_tests/_utils/check_sync_diff, tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml, tests/integration_tests/changefeed_dup_error_restart/conf/workload, tests/integration_tests/changefeed_dup_error_restart/run.sh, tests/integration_tests/force_replicate_table/run.sh, tests/integration_tests/open_protocol_handle_key_only/data/data.sql, tests/integration_tests/open_protocol_handle_key_only/run.sh, tests/integration_tests/run_group.sh] [Pipeline] echo diff file not matched: cdc/model/kv.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-be15534) 203635712 bytes in 1.15 secs (177417362 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/ .git HEAD is now at be1553484 codec(ticdc): 
avro simplify the unit test (#11010) POST git-upload-pack (656 bytes) POST git-upload-pack (973 bytes) From https://github.com/pingcap/tiflow = [up to date] master -> origin/master * [new ref] refs/pull/10919/head -> origin/pr/10919/head HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) 🚧 Checkouting to base SHA:be1553484fe4c03594eabb8d7435c694e5fd7224... HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) ✅ Checked. 🎉 🧾 HEAD info: be1553484fe4c03594eabb8d7435c694e5fd7224 be1553484 codec(ticdc): avro simplify the unit test (#11010) 2a7a65c6f Support Sequences (#10203) 36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005) 🚧 Pre-merge heads of pull requests to base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ... Updating be1553484..41fc0de5f Fast-forward cdc/model/kv.go | 5 + cdc/model/sink.go | 35 ++- cdc/model/sink_test.go | 9 +- cdc/processor/processor.go | 21 +- cdc/processor/sinkmanager/manager.go | 5 + cdc/processor/sourcemanager/manager.go | 66 +++- cdc/redo/reader/reader.go | 21 +- cdc/sink/dmlsink/factory/factory.go | 8 +- cdc/sink/dmlsink/txn/mysql/mysql.go | 89 +++--- cdc/sink/dmlsink/txn/mysql/mysql_test.go | 2 +- cmd/kafka-consumer/main.go | 4 +- cmd/pulsar-consumer/main.go | 17 +- cmd/storage-consumer/main.go | 4 +- errors.toml | 5 + pkg/applier/redo.go | 303 +++++++++++++++++- pkg/applier/redo_test.go | 347 ++++++++++++++++++++- pkg/errors/cdc_errors.go | 4 + pkg/errors/helper.go | 19 ++ tests/integration_tests/_utils/check_sync_diff | 2 +- .../conf/diff_config.toml | 29 ++ .../changefeed_dup_error_restart/conf/workload | 13 + .../changefeed_dup_error_restart/run.sh | 54 ++++ .../integration_tests/force_replicate_table/run.sh | 4 +- .../open_protocol_handle_key_only/data/data.sql | 23 +- .../open_protocol_handle_key_only/run.sh | 2 + tests/integration_tests/run_group.sh | 5 +- 26 files changed, 974 insertions(+), 122 deletions(-) create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/workload create mode 100755 tests/integration_tests/changefeed_dup_error_restart/run.sh 🧾 Pre-merged result: 41fc0de5f6af46f8da184375c7b63207ef1c184b 41fc0de5f f 172d5e4f4 f 58ed95284 f ✅ Pre merged 🎉 ✅ ~~~~~All done.~~~~~~ [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache saved successfully (git/pingcap/tiflow/rev-be15534-41fc0de) 203844096 bytes in 5.33 secs (38224492 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh + cd ../tiflow + ./scripts/download-integration-test-binaries.sh master Download binaries... 
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1655 0 --:--:-- --:--:-- --:--:-- 1708 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 199 0 --:--:-- --:--:-- --:--:-- 200 100 41 100 41 0 0 199 0 --:--:-- --:--:-- --:--:-- 199 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 2681 0 --:--:-- --:--:-- --:--:-- 2733 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1131 0 --:--:-- --:--:-- --:--:-- 1138 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz 2024-05-04 15:11:50 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz [536570515/536570515] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz 2024-05-04 15:12:01 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz 2024-05-04 15:12:16 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz 2024-05-04 15:12:30 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz [456057803/456057803] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-05-04 15:12:35 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-05-04 15:12:37 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-05-04 15:12:37 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-05-04 15:12:37 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-05-04 15:12:39 
URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz [79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-05-04 15:12:45 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 15:12 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 15:12 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 15:12 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 15:12 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 
1 jenkins jenkins 2.0M Apr 30 16:11 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start + cd - /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 15:12 . drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 15:12 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 15:12 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 15:12 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 
1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 8.0K drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 15:12 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 15:12 .. 
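
Note: the `make cdc` step that follows stamps version metadata into the binary at link time via `-ldflags '-X ...'` rather than reading it from source. A minimal sketch of the mechanism in Go, using a hypothetical module and variable names (not the actual tiflow version package):

// main.go -- illustrative only; the package path and variable names are assumptions.
package main

import "fmt"

// Plain package-level string variables; the Go linker overwrites them when the
// binary is built with, for example:
//   go build -ldflags "-X 'main.ReleaseVersion=v8.2.0-alpha' -X 'main.GitHash=41fc0de5f'" .
var (
	ReleaseVersion = "unknown"
	GitHash        = "unknown"
)

func main() {
	fmt.Printf("Release Version: %s\nGit Commit Hash: %s\n", ReleaseVersion, GitHash)
}

This is why `./bin/cdc version` later in this stage reports the pre-merged commit 41fc0de5f and a fresh build timestamp without any source edit: the values come from the linker flags passed by the Makefile.
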
+ '[' -f ./bin/cdc ']' + make cdc CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:12:49" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc ./cmd/cdc go: downloading github.com/spf13/cobra v1.8.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading golang.org/x/net v0.24.0 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/DATA-DOG/go-sqlmock v1.5.0 go: downloading github.com/imdario/mergo v0.3.16 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/hashicorp/golang-lru v0.5.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/uber-go/atomic v1.4.0 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/segmentio/kafka-go v0.4.41-0.20230526171612-f057b1d369cd go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/google/uuid v1.6.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading 
go.etcd.io/etcd/server/v3 v3.5.12 go: downloading golang.org/x/time v0.5.0 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 go: downloading github.com/swaggo/gin-swagger v1.2.0 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading cloud.google.com/go v0.112.2 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/r3labs/diff v1.1.0 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/YangKeao/seahash v0.0.0-20240229041150-e7bf269c3140 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 go: downloading github.com/goccy/go-json v0.10.2 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/pingcap/tidb-dashboard v0.0.0-20240326110213-9768844ff5d7 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/swaggo/swag v1.16.3 go: downloading github.com/golang/mock v1.6.0 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading golang.org/x/term v0.19.0 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading 
github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/xdg-go/scram v1.1.2 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading google.golang.org/api v0.170.0 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/aws/aws-sdk-go-v2/config v1.18.30 go: downloading github.com/aws/aws-sdk-go-v2/credentials v1.13.29 go: downloading github.com/aws/aws-sdk-go-v2/service/glue v1.58.1 go: downloading github.com/jarcoal/httpmock v1.2.0 go: downloading github.com/mailru/easyjson v0.7.7 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/joomcode/errorx v1.0.1 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/glebarez/sqlite v1.7.0 go: downloading gorm.io/driver/mysql v1.3.3 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: 
downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/shopspring/decimal v1.3.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/KyleBanks/depth v1.2.1 go: downloading github.com/go-openapi/spec v0.21.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/xdg-go/pbkdf2 v1.0.0 go: downloading github.com/xdg-go/stringprep v1.0.4 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.6 go: downloading github.com/aws/aws-sdk-go-v2/internal/ini v1.3.37 go: downloading github.com/aws/aws-sdk-go-v2/service/sso v1.12.14 go: downloading github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.14 go: downloading github.com/aws/aws-sdk-go-v2/service/sts v1.20.1 go: downloading github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.36 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/glebarez/go-sqlite v1.21.2 go: downloading github.com/go-ozzo/ozzo-validation/v4 v4.3.0 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-openapi/jsonpointer v0.21.0 go: downloading github.com/go-openapi/jsonreference v0.21.0 go: downloading github.com/go-openapi/swag v0.23.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.30 go: downloading github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.30 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading modernc.org/libc v1.37.1 go: downloading modernc.org/sqlite v1.27.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/josharian/intern v1.0.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/coocood/freecache 
v1.2.1 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/blacktear23/go-proxyprotocol v1.0.6 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/pingcap/fn v1.0.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading k8s.io/api v0.28.6 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/lestrrat-go/httprc v1.0.5 
go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading modernc.org/memory v1.7.2 go: downloading modernc.org/mathutil v1.6.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/felixge/httpsnoop v1.0.4 + '[' -f ./bin/cdc_kafka_consumer ']' + make kafka_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:14:50" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go + '[' -f ./bin/cdc_storage_consumer ']' + make storage_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:14:57" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go + '[' -f ./bin/cdc.test ']' + make integration_test_build cd tools/check && GO111MODULE=on go build -mod=mod -o ../bin/failpoint-ctl github.com/pingcap/failpoint/failpoint-ctl go: downloading github.com/pingcap/failpoint v0.0.0-20210316064728-7acb0f0a3dfd go: downloading github.com/sergi/go-diff v1.1.0 CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X 
"github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc_pulsar_consumer ./cmd/pulsar-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/oauth2-server ./cmd/oauth2-server/main.go go: downloading github.com/go-oauth2/oauth2/v4 v4.5.2 go: downloading github.com/tidwall/buntdb v1.3.0 go: downloading github.com/tidwall/match v1.1.1 go: downloading github.com/tidwall/gjson v1.14.3 go: downloading github.com/tidwall/grect v0.1.4 go: downloading github.com/tidwall/rtred v0.1.2 go: downloading github.com/tidwall/tinyqueue v0.1.1 go: downloading github.com/tidwall/pretty v1.2.0 $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl enable >/dev/null) go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/PingCAP-QE/go-sqlsmith v0.0.0-20231213065948-336e064b488d go: downloading github.com/chzyer/readline v1.5.1 go: downloading github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 go: downloading github.com/gogo/gateway v1.1.0 go: downloading github.com/deepmap/oapi-codegen v1.9.0 go: downloading github.com/getkin/kin-openapi v0.80.0 go: downloading github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954 go: downloading github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0 go: downloading github.com/mattn/go-shellwords v1.0.12 go: downloading github.com/ngaut/log v0.0.0-20210830112240-0124ec040aeb go: downloading go.uber.org/dig v1.13.0 go: downloading go.uber.org/ratelimit v0.2.0 go: downloading github.com/VividCortex/mysqlerr v1.0.0 go: downloading go.uber.org/goleak v1.3.0 go: downloading github.com/bradleyjkemp/grpc-tools v0.2.5 go: downloading github.com/integralist/go-findroot v0.0.0-20160518114804-ac90681525dc go: downloading github.com/jmoiron/sqlx v1.3.3 go: downloading upper.io/db.v3 v3.7.1+incompatible go: downloading github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 go: downloading github.com/improbable-eng/grpc-web v0.12.0 go: downloading github.com/ghodss/yaml v1.0.0 go: downloading github.com/rs/cors v1.7.0 go: downloading github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -c -cover -covermode=atomic \ -coverpkg=github.com/pingcap/tiflow/... \ -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 07:15:04" -X "github.com/pingcap/tiflow/pkg/version.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-71-g41fc0de5f"' -o bin/cdc ./cmd/cdc/main.go \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null) + ls -alh ./bin total 1.2G drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 15:19 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 15:12 .. -rwxr-xr-x. 1 jenkins jenkins 220M May 4 15:19 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 15:19 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 15:15 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 15:15 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 15:15 cdc_storage_consumer -rwxr-xr-x. 1 jenkins jenkins 12M May 4 15:15 oauth2-server + ./bin/cdc version Release Version: v8.2.0-alpha-71-g41fc0de5f Git Commit Hash: 41fc0de5f6af46f8da184375c7b63207ef1c184b Git Branch: HEAD UTC Build Time: 2024-05-04 07:15:04 Go Version: go version go1.21.0 linux/amd64 Failpoint Build: true [Pipeline] } Cache saved successfully (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-41fc0de) 1191699456 bytes in 17.38 secs (68567982 bytes/sec) [Pipeline] // cache [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/ + ls -alh ./bin total 3.0G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 15:20 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 15:12 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 15:20 bin -rwxr-xr-x. 1 jenkins jenkins 220M May 4 15:19 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 15:19 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 15:15 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 15:15 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 15:15 cdc_storage_consumer drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 15:20 etc -rwxr-xr-x. 1 jenkins jenkins 17M May 4 15:20 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 15:20 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 15:20 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 15:20 lib lrwxrwxrwx. 1 jenkins jenkins 13 May 4 15:20 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K May 4 15:20 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 May 4 15:20 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K May 4 15:20 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 May 4 15:20 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 
1 jenkins jenkins 15 May 4 15:20 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M May 4 15:20 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M May 4 15:20 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M May 4 15:20 minio -rwxr-xr-x. 1 jenkins jenkins 12M May 4 15:15 oauth2-server -rwxr-xr-x. 1 jenkins jenkins 37M May 4 15:20 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M May 4 15:20 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M May 4 15:20 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M May 4 15:20 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M May 4 15:20 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M May 4 15:20 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M May 4 15:20 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M May 4 15:20 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 15:20 share -rwxr-xr-x. 1 jenkins jenkins 32M May 4 15:20 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M May 4 15:20 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 4 15:20 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M May 4 15:20 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M May 4 15:20 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M May 4 15:20 xprog [Pipeline] } Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 68.95 secs (54058025 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Tests) [Pipeline] parallel [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G00') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G01') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G02') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G03') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G04') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G05') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G06') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G07') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G08') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G09') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G10') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G11') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G12') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G13') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G14') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G15') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G16') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G17') [Pipeline] withEnv [Pipeline] { 
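The go build and go test -c commands earlier in this stage bake release metadata into each binary via -ldflags '-X importpath.Var=value', which is why ./bin/cdc version above can report the exact commit hash and UTC build time without any runtime lookup. A minimal, self-contained sketch of the mechanism (editor's illustration; the package and variable names below are local stand-ins, not the actual github.com/pingcap/tiflow/pkg/version source):

    // versiondemo.go - hypothetical stand-in for pkg/version; each -X flag
    // overwrites one of these package-level strings at link time.
    package main

    import "fmt"

    var (
        ReleaseVersion = "unknown" // overridden by -X "main.ReleaseVersion=..."
        GitHash        = "unknown" // overridden by -X "main.GitHash=..."
        GitBranch      = "unknown"
        BuildTS        = "unknown"
    )

    func main() {
        fmt.Println("Release Version:", ReleaseVersion)
        fmt.Println("Git Commit Hash:", GitHash)
        fmt.Println("Git Branch:", GitBranch)
        fmt.Println("UTC Build Time:", BuildTS)
    }

    // Built analogously to the commands in the log, e.g.:
    //   go build -trimpath -ldflags '-X "main.ReleaseVersion=v8.2.0-alpha-71-g41fc0de5f" -X "main.GitHash=41fc0de5f6af46f8da184375c7b63207ef1c184b"' -o versiondemo .

Note also that the coverage-instrumented cdc.test build above is bracketed by tools/bin/failpoint-ctl enable and disable, with the || { ... disable ...; exit 1; } fallback ensuring the failpoint rewrites are reverted even when the build fails.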
[Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes 
jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j1vs1-vpq0z --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0548597585c94e284c1da5702caa59669232ac56" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j1vs1" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" 
value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rv1ts-qf79n --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0c87058c1a7ebc37262737ee5d2dbb090ecfde65" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rv1ts" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: 
nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: 
cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j5hwn-0fk88 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "682a1443ba2818d50a29e5440887b8ac36999cb6" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j5hwn" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: 
"SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" 
resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rwb1z-p284l --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1f4f9eedb2633d0516ac6a275300fc64c6201260" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rwb1z" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: 
"127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-w3vlr-26nhw The recommended git tool is: git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@528d30e1; decorates RemoteLauncher[hudson.remoting.Channel@1b66766:JNLP4-connect connection from 10.233.88.82/10.233.88.82:60460] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow 
clone with depth 1 [Pipeline] podTemplate [Pipeline] { [Pipeline] node No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@14cde9ec; decorates RemoteLauncher[hudson.remoting.Channel@46322a79:JNLP4-connect connection from 10.233.68.237/10.233.68.237:41274] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@ffb57d6; decorates RemoteLauncher[hudson.remoting.Channel@47708745:JNLP4-connect connection from 10.233.100.215/10.233.100.215:47932] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-62dq1-z9nwl --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "65eddd1937f2b4284493eac28ee23f0b26f895b2" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-62dq1" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: 
"KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} 
name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@68f0faeb; decorates RemoteLauncher[hudson.remoting.Channel@a29b2d2:JNLP4-connect connection from 10.233.107.145/10.233.107.145:36060] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq [Pipeline] node > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-9791l-51ddk --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "b76875f906a755901467eb3d846e892974bdd85c" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-9791l" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" 
resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: 
"/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7345a647; decorates RemoteLauncher[hudson.remoting.Channel@7877676b:JNLP4-connect connection from 10.233.90.198/10.233.90.198:58172] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-cxnnw-r09kp --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "95c3dcc491e5c4c28e3963df70fe9e2044a92116" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-cxnnw" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: 
"big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-hlzqq-3n1v2 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "c35414f6136ff539b41ab2765d806550182e77b1" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-hlzqq" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks 
&& curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf [Pipeline] node [Pipeline] checkout No credentials specified 
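Note: the RACK_COMMAND value in the pod specs above is a flattened YAML string with escaped line continuations. De-escaped, it is just two curl downloads; the wurstmeister/kafka start script evaluates RACK_COMMAND before launching the broker, which this pipeline appears to use as a hook so that the JKS files named by KAFKA_SSL_KEYSTORE_LOCATION / KAFKA_SSL_TRUSTSTORE_LOCATION already exist at broker startup. Illustrative sketch only, reconstructed from the spec text:

  # De-escaped form of the RACK_COMMAND string from the pod template above (not a new command).
  curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks -o /tmp/kafka.server.keystore.jks \
    && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks -o /tmp/kafka.server.truststore.jks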
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@556ad239; decorates RemoteLauncher[hudson.remoting.Channel@1b052adf:JNLP4-connect connection from 10.233.108.197/10.233.108.197:34940] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 The recommended git tool is: git Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@422c506f; decorates RemoteLauncher[hudson.remoting.Channel@b0b2e60:JNLP4-connect connection from 10.233.97.2/10.233.97.2:54716] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-dwfjx-jq88f --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "87679de6855a6e69957adb93c1666f9a6f67bab9" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-dwfjx" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" 
memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: 
"JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] withEnv [Pipeline] { [Pipeline] { [Pipeline] container [Pipeline] { Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) [Pipeline] podTemplate [Pipeline] { [Pipeline] checkout [Pipeline] node The recommended git tool is: git [Pipeline] withEnv Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b [Pipeline] { [Pipeline] container [Pipeline] { Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] stage [Pipeline] { (Test) [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] { (Test) Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN Avoid second fetch [Pipeline] stage Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) [Pipeline] { (Test) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 No credentials specified 
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@8bc3a94; decorates RemoteLauncher[hudson.remoting.Channel@32a12db4:JNLP4-connect connection from 10.233.86.229/10.233.86.229:58688] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@551cccf5; decorates RemoteLauncher[hudson.remoting.Channel@d3bade8:JNLP4-connect connection from 10.233.105.51/10.233.105.51:52668] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch 
+refs/heads/*:refs/remotes/origin/* # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-kc51m-fljjf --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "affcdf415cede63f1a69a5e0497be666288c20dd" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-kc51m" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - 
name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-3vxcl-rx9xw --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "549b72015ce810da2f71fc6de5c2d85a14b4df77" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-3vxcl" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - 
"amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: 
"workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7vp46-n2g8w --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "215cf19d709f32fb3326e9846128e5f1b7edca42" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7vp46" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: 
"wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - 
name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7tmjt-6pp0j --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "f618917441318512d0811bf456a3c12e5a80477d" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7tmjt" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: 
"KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} 
name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-mmjq9-90s2g --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "71a966685af46aed197fd8e32e4feb75b08706a1" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-mmjq9" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" 
imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 7.61 secs (490055705 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] sh Still waiting to schedule task โ€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1828-w3vlr-26nhwโ€™ is offline [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] node [Pipeline] node [Pipeline] node + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-39dcv-8qhpk --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "9c63bde507def7b493a588c6759e5513e74a2098" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-39dcv" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL 
https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-88ts5-84q0l --- apiVersion: "v1" kind: "Pod" metadata: 
annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "edec403621ee2eb4d812f55fcbe0fbe0e1b746e6" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-88ts5" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: 
"workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j56gh-nwrhj --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "eecae987575697e29b5487de298cc3adf9b5e651" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j56gh" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - 
mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: 
"JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 12.66 secs (294343816 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 7.77 secs (479482638 bytes/sec) [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] container [Pipeline] { The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] stage [Pipeline] { (Test) The recommended git tool is: git [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] } [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@667b6b20; decorates RemoteLauncher[hudson.remoting.Channel@200e8382:JNLP4-connect connection from 10.233.72.70/10.233.72.70:58234] will be ignored (a typical symptom is the Git executable not being run inside a designated container) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3eeb86f6; decorates RemoteLauncher[hudson.remoting.Channel@25441817:JNLP4-connect connection from 10.233.66.61/10.233.66.61:57748] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher 
org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@69f5d9e9; decorates RemoteLauncher[hudson.remoting.Channel@1cf48cec:JNLP4-connect connection from 10.233.127.18/10.233.127.18:59940] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@b12d500; decorates RemoteLauncher[hudson.remoting.Channel@59378c06:JNLP4-connect connection from 10.233.70.218/10.233.70.218:32980] will be ignored (a typical symptom is the Git executable not being run inside a designated container) [Pipeline] timeout Timeout set to expire in 45 min Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@439b05c8; decorates RemoteLauncher[hudson.remoting.Channel@159d8f8d:JNLP4-connect connection from 10.233.71.19/10.233.71.19:59892] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials + echo Waiting for zookeeper to be ready... 
Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // container [Pipeline] sh [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G01 Run cases: open_protocol_handle_key_only PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=73f9a330-e343-4e8b-94a8-21e9538d756e BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rv1ts GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rv1ts 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4f16ceb6; decorates RemoteLauncher[hudson.remoting.Channel@67f29cd6:JNLP4-connect connection from 10.233.93.61/10.233.93.61:33750] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6e063ad3; decorates RemoteLauncher[hudson.remoting.Channel@76fa45c7:JNLP4-connect connection from 10.233.123.200/10.233.123.200:53594] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3ad14512; decorates RemoteLauncher[hudson.remoting.Channel@4f2c8653:JNLP4-connect connection from 10.233.84.197/10.233.84.197:45708] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch 
+refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/open_protocol_handle_key_only Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ee8d040003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd, pid:1365, start at 2024-05-04 15:22:30.082658856 +0800 CST m=+5.154769755 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:24:30.092 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:22:30.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:12:30.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ee8d040003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd, pid:1365, start at 2024-05-04 15:22:30.082658856 +0800 CST m=+5.154769755 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:24:30.092 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:22:30.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:12:30.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ee8dc80007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rv1ts-k5xvd, pid:1453, start at 2024-05-04 15:22:30.136466518 +0800 CST m=+5.156514112 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:24:30.143 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:22:30.130 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:12:30.130 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2848.out cli tso query --pd=http://127.0.0.1:2379 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 21.01 secs (177395304 bytes/sec) [Pipeline] { [Pipeline] cache + set +x + tso='449526458897399809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526458897399809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 15:22:35 CST 2024] <<<<<< START cdc server in open_protocol_handle_key_only case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.28922894.out server --log-file /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:22:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eda8a001-b3d5-40c3-bf1b-9d481a69b870 {"id":"eda8a001-b3d5-40c3-bf1b-9d481a69b870","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807355} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427b774bcf eda8a001-b3d5-40c3-bf1b-9d481a69b870 /tidb/cdc/default/default/upstream/7365041450735060997 {"id":7365041450735060997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eda8a001-b3d5-40c3-bf1b-9d481a69b870 {"id":"eda8a001-b3d5-40c3-bf1b-9d481a69b870","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807355} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427b774bcf eda8a001-b3d5-40c3-bf1b-9d481a69b870 /tidb/cdc/default/default/upstream/7365041450735060997 {"id":7365041450735060997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eda8a001-b3d5-40c3-bf1b-9d481a69b870 
{"id":"eda8a001-b3d5-40c3-bf1b-9d481a69b870","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807355} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427b774bcf eda8a001-b3d5-40c3-bf1b-9d481a69b870 /tidb/cdc/default/default/upstream/7365041450735060997 {"id":7365041450735060997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2943.out cli changefeed create --start-ts=449526458897399809 '--sink-uri=kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol&max-message-bytes=800&kafka-version=2.4.1' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/conf/changefeed.toml Create changefeed successfully! ID: e4e8342f-6ff6-41c7-b255-f8b3e34dad0a Info: {"upstream_id":7365041450735060997,"namespace":"default","id":"e4e8342f-6ff6-41c7-b255-f8b3e34dad0a","sink_uri":"kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol\u0026max-message-bytes=800\u0026kafka-version=2.4.1","create_time":"2024-05-04T15:22:38.570524457+08:00","start_ts":449526458897399809,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526458897399809,"checkpoint_ts":449526458897399809,"checkpoint_time":"2024-05-04 15:22:33.582"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... 
+ set +x table test.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 2-th check, retry later table test.finish_mark exists check diff failed 1-th time, retry later check diff failed 2-th time, retry later check diff failed 3-th time, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 15.66 secs (237932810 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 4-th time, retry later check diff failed 5-th time, retry later check diff failed 6-th time, retry later check diff failed 7-th time, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 6.48 secs (574868833 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 8-th time, retry later check diff failed 9-th time, retry later check diff failed 10-th time, retry later check diff failed at last A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... 
failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table data of ``test`.`finish_mark`` ... equivalent _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/0 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. 
The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... 
equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... 
Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' cat: /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff/output/sync_diff.log: No such file or directory /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh: line 1: 2982 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" 
--config="$CUR/conf/changefeed.toml" 2>&1 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 19.90 secs (187263315 bytes/sec) [Pipeline] { [Pipeline] cache Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 8.62 secs (432564744 bytes/sec) [Pipeline] { [Pipeline] cache Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 17.58 secs (212004288 bytes/sec) [Pipeline] { [Pipeline] // timeout [Pipeline] } [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] // container [Pipeline] sh [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G05 Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=7333d4fd-66ec-4ebc-bf12-d4ddf2acf668 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j5hwn GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j5hwn pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... 
+ nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] // container [Pipeline] sh [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G02 Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=9d2699c8-2456-4b9c-9f51-b19804000413 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j1vs1 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j1vs1 
pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:23:57 CST 2024] <<<<<< run test case consistent_replicate_ddl success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
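Editor's note: the repeated "A total of 2 tables need to be compared ... failure" blocks earlier in this log come from re-running sync_diff_inspector until upstream and downstream data converge (each failed attempt regenerates the patch file under output/fix-on-tidb0/ and output/sync_diff.log). A minimal sketch of that retry pattern is below; the loop structure, attempt count, and config path are assumptions for illustration, not the actual helper in tiflow/tests/integration_tests/_utils.

#!/usr/bin/env bash
# check_sync_diff.sh -- hypothetical sketch of the retry loop around sync_diff_inspector.
# Assumptions: sync_diff_inspector is on PATH and the config points its output
# directory at $WORK_DIR/output (names are illustrative only).
set -eu

WORK_DIR=${1:-/tmp/tidb_cdc_test/open_protocol_handle_key_only}
CHECK_TIME=${2:-60}     # maximum number of attempts (assumed)
INTERVAL=3              # seconds between attempts (assumed)

i=0
while [ "$i" -lt "$CHECK_TIME" ]; do
    rm -rf "$WORK_DIR/output"
    # Each attempt re-runs the full comparison; a failed attempt leaves details
    # in output/sync_diff.log and a patch file under output/fix-on-tidb0/.
    if sync_diff_inspector --config="$WORK_DIR/conf/diff_config.toml" >/dev/null 2>&1; then
        echo "data is consistent after $((i + 1)) attempt(s)"
        exit 0
    fi
    i=$((i + 1))
    sleep "$INTERVAL"
done

echo "sync_diff still failing after $CHECK_TIME attempts, see $WORK_DIR/output/sync_diff.log"
cat "$WORK_DIR/output/sync_diff.log" || true
exit 1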
[Pipeline] // timeout [Pipeline] } [Pipeline] // dir Post stage [Pipeline] sh [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { + ls /tmp/tidb_cdc_test/ cov.open_protocol_handle_key_only.cli.2848.out cov.open_protocol_handle_key_only.cli.2943.out open_protocol_handle_key_only sql_res.open_protocol_handle_key_only.txt ++ find /tmp/tidb_cdc_test/ -type f -name '*.log' + tar -cvzf log-G01.tar.gz /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/stdout.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log tar: Removing leading `/' from member names /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0005/000002.log 
/tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down.log [Pipeline] { [Pipeline] { [Pipeline] // container [Pipeline] // container Verifying downstream PD is started... [Pipeline] // container /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3.log [Pipeline] // container [Pipeline] // container [Pipeline] sh /tmp/tidb_cdc_test/open_protocol_handle_key_only/stdout.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log + ls -alh log-G01.tar.gz -rw-r--r-- 1 jenkins jenkins 3.1M May 4 15:23 log-G01.tar.gz [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G00 [Pipeline] sh Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=dd1ef842-bf91-4df4-aa8c-990a9d031319 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rwb1z GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-rwb1z pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:00 CST 2024] <<<<<< run test case bdr_mode success! >>>>>> + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=cf077ff5-f7ab-4952-ad10-fe995cb035aa BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-62dq1 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 pingcap_tiflow_pull_cdc_integration_kafka_test_1828-62dq1 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
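Editor's note: the "+ nc -z localhost 2181 ... grep -F -w /brokers/ids/1" traces above are the readiness probe each executor runs before starting its test group: wait for ZooKeeper, wait for the Kafka listener, then confirm broker id 1 has registered in ZooKeeper. A restatement of that probe as a standalone script follows; the ports, messages, and pipeline are taken from the log, while the until/sleep retry loops are an assumption (the trace only shows single checks).

#!/usr/bin/env bash
# wait_kafka_ready.sh -- sketch of the Kafka/ZooKeeper readiness probe shown in the sh steps.
set -eu

echo "Waiting for zookeeper to be ready..."
until nc -z localhost 2181; do sleep 1; done

echo "Waiting for kafka to be ready..."
until nc -z localhost 9092; do sleep 1; done

echo "Waiting for kafka-broker to be ready..."
# Ask ZooKeeper for its node dump and make sure broker id 1 is registered.
until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
    sleep 1
done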
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G07 Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=d531faae-2a47-4a66-9264-3c79a156b2f1 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-9791l GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-9791l pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G04 Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=b99ad3ae-e908-4536-8f1c-24c8e18bd1bf BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-cxnnw GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-cxnnw pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
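Editor's note: every executor above invokes ./tests/integration_tests/run_group.sh with the sink type and a group id (G00, G03, G04, G07, ...), prints "Run cases: ...", and then runs each case's run.sh in turn. The sketch below shows that dispatch pattern in simplified form; the group map entries are copied from this build's output, but the script structure and the way the sink type is passed to run.sh are assumptions, not the real run_group.sh.

#!/usr/bin/env bash
# run_group.sh <sink_type> <group> -- hypothetical sketch of the test-group dispatcher.
set -eu

sink_type=$1    # e.g. kafka
group=$2        # e.g. G04

# Illustrative subset of the group-to-cases mapping seen in this log.
declare -A groups=(
    [G03]="row_format drop_many_tables processor_stop_delay partition_table"
    [G04]="foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop"
    [G07]="kv_client_stream_reconnect cdc split_region"
)

cases=${groups[$group]:?unknown group $group}
echo "Run cases: $cases"

for c in $cases; do
    script="$(dirname "$0")/$c/run.sh"
    echo "=================>> Running test $script using Sink-Type: $sink_type... <<================="
    bash "$script" "$sink_type"
done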
[Pipeline] // container [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=8a852be9-ce50-4296-84d4-fddb19b9c357 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-hlzqq GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-hlzqq pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiKV... [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
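Editor's note: in the Post stage above, the failing G01 executor lists /tmp/tidb_cdc_test/ and packs every *.log it can find into log-G01.tar.gz before the pod is torn down, so the sync_diff, cdc, tidb, tikv, pd, and tiflash logs survive as a build artifact. A minimal sketch of that collection step is below; the find/tar invocation mirrors the log, the GROUP variable is illustrative.

#!/usr/bin/env bash
# collect_logs.sh -- sketch of the post-stage log collection seen for group G01.
set -eu

GROUP=${1:-G01}
ARCHIVE="log-$GROUP.tar.gz"

ls /tmp/tidb_cdc_test/
# Pack every *.log produced by the test run; word-splitting the find output is
# fine here because the test directories contain no spaces in their names.
tar -cvzf "$ARCHIVE" $(find /tmp/tidb_cdc_test/ -type f -name '*.log')
ls -alh "$ARCHIVE"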
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G09 Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=cabddef3-240d-4eb4-80ee-672befe2b2ab BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-dwfjx GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-dwfjx pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC3848AAE96CB9 < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 07:24:02 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 15:24:02 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>> Exiting on signal: INTERRUPT Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:03 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan Starting Upstream PD... start tidb cluster in /tmp/tidb_cdc_test/row_format Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:06 CST 2024] <<<<<< run test case consistent_replicate_nfs success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<================= kafka downstream isn't support syncpoint record [Sat May 4 15:24:06 CST 2024] <<<<<< run test case syncpoint success! >>>>>> Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:09 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f48d900013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:1298, start at 2024-05-04 15:24:08.45906318 +0800 CST m=+5.159270807 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:08.467 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:08.470 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:08.470 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f48d900013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:1298, start at 2024-05-04 15:24:08.45906318 +0800 CST m=+5.159270807 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:08.467 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:08.470 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:08.470 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f48f140009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:1378, start at 2024-05-04 15:24:08.525074157 +0800 CST m=+5.172580221 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:08.531 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-15:24:08.517 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:08.517 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:12 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 15:24:13 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.28562858.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4dd44000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:1336, start at 2024-05-04 15:24:13.532638047 +0800 CST m=+5.161787100 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.539 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.521 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.521 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4dd44000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:1336, start at 2024-05-04 15:24:13.532638047 +0800 CST m=+5.161787100 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.539 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.521 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.521 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4dd500005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:1412, start at 2024-05-04 15:24:13.528129281 +0800 CST m=+5.087837704 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.534 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.524 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.524 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
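Note: the charset_gbk trace above starts cdc.test in server mode and then polls http://127.0.0.1:8300/debug/info until the response contains "etcd info". A condensed sketch of that wait loop (an illustrative rewrite of the commands visible in the trace, not the repository's actual helper) could look like:

  # Poll the CDC /debug/info endpoint until it reports etcd info, giving up after 50 tries.
  for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
    if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
      break    # server is up and its capture is registered in etcd
    fi
    [ "$i" -eq 50 ] && { echo "cdc server did not become ready" >&2; exit 1; }
    sleep 3
  done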
Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:14 CST 2024] <<<<<< run test case hang_sink_suicide success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4e2cc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:1287, start at 2024-05-04 15:24:13.903001494 +0800 CST m=+5.052534001 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.908 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.875 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.875 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4e2cc0014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:1287, start at 2024-05-04 15:24:13.903001494 +0800 CST m=+5.052534001 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.908 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.875 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.875 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4e3740015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:1376, start at 2024-05-04 15:24:13.960891128 +0800 CST m=+5.054249427 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:13.966 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:13.967 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:13.967 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
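Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows dumped above are TiDB's bootstrap and GC bookkeeping rows (the tikv_gc_* entries) as stored in the mysql.tidb table. A minimal manual check of the same rows might be the query below; the host and port (127.0.0.1:4000) are assumptions for illustration and are not taken from this log:

  # Hypothetical manual inspection of the GC rows shown in the log output above.
  mysql -h 127.0.0.1 -P 4000 -u root -e \
    "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%'"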
Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4fed40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:1289, start at 2024-05-04 15:24:15.703607347 +0800 CST m=+5.093064922 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:15.710 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:15.669 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:15.669 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d09f4fed40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:1289, start at 2024-05-04 15:24:15.703607347 +0800 CST m=+5.093064922 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:15.710 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:15.669 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:15.669 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f4ff900009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:1373, start at 2024-05-04 15:24:15.726820301 +0800 CST m=+5.062988657 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:15.733 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:15.716 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:15.716 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
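Note: the repeated "Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server" lines are a readiness probe retrying until TiDB accepts connections. A sketch of such a probe, under assumed host/port and retry budget (the actual test helper may differ), is:

  # Retry a trivial query until the TiDB server accepts MySQL-protocol connections.
  i=0
  while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
    i=$((i + 1))
    if [ "$i" -gt 60 ]; then
      echo "TiDB did not come up in time" >&2
      exit 1
    fi
    sleep 2
  done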
Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2816.out cli tso query --pd=http://127.0.0.1:2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2807.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f50e14000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:1396, start at 2024-05-04 15:24:16.659266831 +0800 CST m=+5.568126638 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:16.666 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:16.645 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:16.645 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:17 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c88dc26-b99a-4df7-8f72-681be2809457 {"id":"3c88dc26-b99a-4df7-8f72-681be2809457","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807454} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427cf962d0 3c88dc26-b99a-4df7-8f72-681be2809457 /tidb/cdc/default/default/upstream/7365041865870349523 {"id":7365041865870349523,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c88dc26-b99a-4df7-8f72-681be2809457 {"id":"3c88dc26-b99a-4df7-8f72-681be2809457","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807454} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427cf962d0 3c88dc26-b99a-4df7-8f72-681be2809457 /tidb/cdc/default/default/upstream/7365041865870349523 {"id":7365041865870349523,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3c88dc26-b99a-4df7-8f72-681be2809457 {"id":"3c88dc26-b99a-4df7-8f72-681be2809457","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807454} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427cf962d0 3c88dc26-b99a-4df7-8f72-681be2809457 /tidb/cdc/default/default/upstream/7365041865870349523 {"id":7365041865870349523,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 2da23775-a08d-4853-87a2-a1f30676d352 Info: {"upstream_id":7365041865870349523,"namespace":"default","id":"2da23775-a08d-4853-87a2-a1f30676d352","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-04T15:24:17.76131728+08:00","start_ts":449526485134868481,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526485134868481,"checkpoint_ts":449526485134868481,"checkpoint_time":"2024-05-04 15:24:13.670"} [Sat May 4 15:24:17 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>> * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC384C3960B791 < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 07:24:18 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 15:24:18 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Exiting on signal: INTERRUPT + set +x + tso='449526486026420225 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526486026420225 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 15:24:18 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28422844.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:18 CST 2024] <<<<<< run test case server_config_compatibility success! >>>>>> + set +x + tso='449526486132326401 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526486132326401 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 15:24:18 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28652867.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f50e14000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:1396, start at 2024-05-04 15:24:16.659266831 +0800 CST m=+5.568126638 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:16.666 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:16.645 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:16.645 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f50f980013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:1481, start at 2024-05-04 15:24:16.758723734 +0800 CST m=+5.610810549 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:16.765 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:16.742 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:16.742 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f52484000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:1290, start at 2024-05-04 15:24:18.099631319 +0800 CST m=+5.225829081 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:18.106 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:18.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:18.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f52484000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:1290, start at 2024-05-04 15:24:18.099631319 +0800 CST m=+5.225829081 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:18.106 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:18.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:18.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f524740014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:1372, start at 2024-05-04 15:24:18.117718355 +0800 CST m=+5.184636088 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:18.124 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:18.127 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:18.127 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
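The VARIABLE_NAME / VARIABLE_VALUE blocks throughout this log are dumps of TiDB's bootstrap and GC bookkeeping (the mysql.tidb table), and "Verifying ... TiDB is started" succeeds once such a query goes through instead of ERROR 2003. A minimal sketch of that probe, assuming a plain mysql client; the host, port, and retry budget are placeholders rather than the harness's actual values:

# Retry until TiDB accepts connections, then dump the same bookkeeping table the log shows.
wait_tidb_ready() {
    local host=$1 port=$2 i
    for ((i = 0; i < 60; i++)); do
        if mysql -h "$host" -P "$port" -u root \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' 2>/dev/null; then
            return 0
        fi
        sleep 1    # ERROR 2003 (HY000) means the server is not listening yet
    done
    return 1
}
# e.g. wait_tidb_ready 127.0.0.1 4000   (port is a placeholder)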
Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 15:24:21 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28252827.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:21 CST 2024] <<<<<< run test case consistent_partition_table success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:21 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e66d781a-8a4a-448d-8202-f912f6685990 {"id":"e66d781a-8a4a-448d-8202-f912f6685990","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807458} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1329ce e66d781a-8a4a-448d-8202-f912f6685990 /tidb/cdc/default/default/upstream/7365041896510529357 {"id":7365041896510529357,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e66d781a-8a4a-448d-8202-f912f6685990 {"id":"e66d781a-8a4a-448d-8202-f912f6685990","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807458} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1329ce e66d781a-8a4a-448d-8202-f912f6685990 /tidb/cdc/default/default/upstream/7365041896510529357 {"id":7365041896510529357,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e66d781a-8a4a-448d-8202-f912f6685990 {"id":"e66d781a-8a4a-448d-8202-f912f6685990","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807458} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1329ce e66d781a-8a4a-448d-8202-f912f6685990 /tidb/cdc/default/default/upstream/7365041896510529357 {"id":7365041896510529357,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2903.out cli changefeed create --start-ts=449526486026420225 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-92?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: c9eda196-7be0-4656-a52c-eb3c2278313c Info: {"upstream_id":7365041896510529357,"namespace":"default","id":"c9eda196-7be0-4656-a52c-eb3c2278313c","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-92?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:24:22.1118547+08:00","start_ts":449526486026420225,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526486026420225,"checkpoint_ts":449526486026420225,"checkpoint_time":"2024-05-04 15:24:17.071"} PASS find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_dup_error_restart/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:24:22 CST 2024] <<<<<< run test case changefeed_dup_error_restart success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f55d68e6-7097-48e3-a7eb-7999cb320b2a {"id":"f55d68e6-7097-48e3-a7eb-7999cb320b2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807459} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1108cd f55d68e6-7097-48e3-a7eb-7999cb320b2a /tidb/cdc/default/default/upstream/7365041895175965841 {"id":7365041895175965841,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f55d68e6-7097-48e3-a7eb-7999cb320b2a {"id":"f55d68e6-7097-48e3-a7eb-7999cb320b2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807459} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1108cd f55d68e6-7097-48e3-a7eb-7999cb320b2a /tidb/cdc/default/default/upstream/7365041895175965841 {"id":7365041895175965841,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f55d68e6-7097-48e3-a7eb-7999cb320b2a {"id":"f55d68e6-7097-48e3-a7eb-7999cb320b2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807459} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d1108cd f55d68e6-7097-48e3-a7eb-7999cb320b2a /tidb/cdc/default/default/upstream/7365041895175965841 {"id":7365041895175965841,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2920.out cli changefeed create --start-ts=449526486132326401 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-21849?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' coverage: 2.4% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: 0f6176b7-623a-4291-8e1e-e92744d91a9a Info: {"upstream_id":7365041895175965841,"namespace":"default","id":"0f6176b7-623a-4291-8e1e-e92744d91a9a","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-21849?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:24:22.506971734+08:00","start_ts":449526486132326401,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526486132326401,"checkpoint_ts":449526486132326401,"checkpoint_time":"2024-05-04 15:24:17.475"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Sat May 4 15:24:23 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28302832.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 15:24:23 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>> + set +x [Sat May 4 15:24:23 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 20.35 secs (183115886 bytes/sec) [Pipeline] { + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0d4c64f4-8f4c-4b91-861f-419b67efc44d {"id":"0d4c64f4-8f4c-4b91-861f-419b67efc44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807461} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d19e7cf 0d4c64f4-8f4c-4b91-861f-419b67efc44d /tidb/cdc/default/default/upstream/7365041907288872667 {"id":7365041907288872667,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0d4c64f4-8f4c-4b91-861f-419b67efc44d {"id":"0d4c64f4-8f4c-4b91-861f-419b67efc44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807461} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d19e7cf 0d4c64f4-8f4c-4b91-861f-419b67efc44d /tidb/cdc/default/default/upstream/7365041907288872667 {"id":7365041907288872667,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0d4c64f4-8f4c-4b91-861f-419b67efc44d {"id":"0d4c64f4-8f4c-4b91-861f-419b67efc44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807461} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d19e7cf 0d4c64f4-8f4c-4b91-861f-419b67efc44d /tidb/cdc/default/default/upstream/7365041907288872667 
{"id":7365041907288872667,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Pipeline] cache [Sat May 4 15:24:24 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>> 0 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff failed 1-th time, retry later table foreign_key.finish_mark not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:26 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/475f9b71-7f15-4cb7-9c2d-22cee1714441 {"id":"475f9b71-7f15-4cb7-9c2d-22cee1714441","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807463} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d20e0cc 475f9b71-7f15-4cb7-9c2d-22cee1714441 /tidb/cdc/default/default/upstream/7365041913873431242 {"id":7365041913873431242,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/475f9b71-7f15-4cb7-9c2d-22cee1714441 {"id":"475f9b71-7f15-4cb7-9c2d-22cee1714441","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807463} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d20e0cc 475f9b71-7f15-4cb7-9c2d-22cee1714441 /tidb/cdc/default/default/upstream/7365041913873431242 {"id":7365041913873431242,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/475f9b71-7f15-4cb7-9c2d-22cee1714441 {"id":"475f9b71-7f15-4cb7-9c2d-22cee1714441","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807463} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d20e0cc 
475f9b71-7f15-4cb7-9c2d-22cee1714441 /tidb/cdc/default/default/upstream/7365041913873431242 {"id":7365041913873431242,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:24:26 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>> table foreign_key.finish_mark not exists for 2-th check, retry later check diff failed 2-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Sat May 4 15:24:27 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29712973.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff successfully check_safepoint_forward http://127.0.0.1:2379 7365041907288872667 449526488929665027 449526487985946630 table foreign_key.finish_mark not exists for 3-th check, retry later run task successfully check_changefeed_state http://127.0.0.1:2379 f46ae884-8f65-4d0b-b34e-2384d710985c stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=f46ae884-8f65-4d0b-b34e-2384d710985c + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c f46ae884-8f65-4d0b-b34e-2384d710985c -s + info='{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' + echo '{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' { "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null } ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"stopped",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"stopped",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365041907288872667 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/67687b29-bf84-4712-9849-6b14946a0d46 {"id":"67687b29-bf84-4712-9849-6b14946a0d46","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807468} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d142284 67687b29-bf84-4712-9849-6b14946a0d46 /tidb/cdc/default/default/upstream/7365041906825874133 {"id":7365041906825874133,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/67687b29-bf84-4712-9849-6b14946a0d46 {"id":"67687b29-bf84-4712-9849-6b14946a0d46","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807468} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d142284 67687b29-bf84-4712-9849-6b14946a0d46 /tidb/cdc/default/default/upstream/7365041906825874133 {"id":7365041906825874133,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/67687b29-bf84-4712-9849-6b14946a0d46 {"id":"67687b29-bf84-4712-9849-6b14946a0d46","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807468} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d142284 67687b29-bf84-4712-9849-6b14946a0d46 /tidb/cdc/default/default/upstream/7365041906825874133 {"id":7365041906825874133,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:24:31 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>> check_changefeed_state http://127.0.0.1:2379 239b7766-1aac-4a33-b768-f89128f9a428 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=239b7766-1aac-4a33-b768-f89128f9a428 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 239b7766-1aac-4a33-b768-f89128f9a428 -s + info='{ "upstream_id": 7365041906825874133, "namespace": "default", "id": "239b7766-1aac-4a33-b768-f89128f9a428", "state": "normal", "checkpoint_tso": 
449526489709019138, "checkpoint_time": "2024-05-04 15:24:31.119", "error": null }' + echo '{ "upstream_id": 7365041906825874133, "namespace": "default", "id": "239b7766-1aac-4a33-b768-f89128f9a428", "state": "normal", "checkpoint_tso": 449526489709019138, "checkpoint_time": "2024-05-04 15:24:31.119", "error": null }' { "upstream_id": 7365041906825874133, "namespace": "default", "id": "239b7766-1aac-4a33-b768-f89128f9a428", "state": "normal", "checkpoint_tso": 449526489709019138, "checkpoint_time": "2024-05-04 15:24:31.119", "error": null } ++ echo '{' '"upstream_id":' 7365041906825874133, '"namespace":' '"default",' '"id":' '"239b7766-1aac-4a33-b768-f89128f9a428",' '"state":' '"normal",' '"checkpoint_tso":' 449526489709019138, '"checkpoint_time":' '"2024-05-04' '15:24:31.119",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365041906825874133, '"namespace":' '"default",' '"id":' '"239b7766-1aac-4a33-b768-f89128f9a428",' '"state":' '"normal",' '"checkpoint_tso":' 449526489709019138, '"checkpoint_time":' '"2024-05-04' '15:24:31.119",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table foreign_key.finish_mark not exists for 4-th check, retry later check diff failed 1-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 5-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark not exists for 1-th check, retry later run task successfully check_changefeed_state http://127.0.0.1:2379 f46ae884-8f65-4d0b-b34e-2384d710985c normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=f46ae884-8f65-4d0b-b34e-2384d710985c + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c f46ae884-8f65-4d0b-b34e-2384d710985c -s + info='{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "normal", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' + echo '{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "normal", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' { "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "normal", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null } ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"normal",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"normal",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7365041907288872667 449526489453953027 449526489453953028 check diff failed 2-th time, retry later table test.finish_mark not exists for 2-th check, retry later table row_format.finish_mark not exists for 2-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
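The repeated "table <schema>.finish_mark not exists for N-th check, retry later" lines are the harness polling the downstream database for a marker table that the test writes last. A minimal sketch of such a wait, assuming the downstream is reachable as plain MySQL/TiDB; the host, port, and retry budget are placeholders, and information_schema is only one possible way to perform the check:

wait_table_exists() {
    local schema=$1 table=$2 i
    for ((i = 1; i <= 60; i++)); do
        if mysql -h 127.0.0.1 -P 3306 -u root -N -e \
            "SELECT 1 FROM information_schema.tables WHERE table_schema='${schema}' AND table_name='${table}';" \
            | grep -q 1; then
            echo "table ${schema}.${table} exists"
            return 0
        fi
        echo "table ${schema}.${table} not exists for ${i}-th check, retry later"
        sleep 2
    done
    return 1
}
# e.g. wait_table_exists foreign_key finish_mark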
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release run task successfully check_changefeed_state http://127.0.0.1:2379 f46ae884-8f65-4d0b-b34e-2384d710985c stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=f46ae884-8f65-4d0b-b34e-2384d710985c + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c f46ae884-8f65-4d0b-b34e-2384d710985c -s table foreign_key.finish_mark not exists for 6-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + info='{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' + echo '{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null }' { "upstream_id": 7365041907288872667, "namespace": "default", "id": "f46ae884-8f65-4d0b-b34e-2384d710985c", "state": "stopped", "checkpoint_tso": 449526489453953028, "checkpoint_time": "2024-05-04 15:24:30.146", "error": null } ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"stopped",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"f46ae884-8f65-4d0b-b34e-2384d710985c",' '"state":' '"stopped",' '"checkpoint_tso":' 449526489453953028, '"checkpoint_time":' '"2024-05-04' '15:24:30.146",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_changefeed_state http://127.0.0.1:2379 9e089ff6-7d57-41a8-b0dd-91a56724f906 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=9e089ff6-7d57-41a8-b0dd-91a56724f906 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 9e089ff6-7d57-41a8-b0dd-91a56724f906 -s check diff successfully + info='{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "9e089ff6-7d57-41a8-b0dd-91a56724f906", "state": "normal", "checkpoint_tso": 449526490974126086, "checkpoint_time": "2024-05-04 15:24:35.945", "error": null }' + echo '{ "upstream_id": 7365041907288872667, "namespace": "default", "id": "9e089ff6-7d57-41a8-b0dd-91a56724f906", "state": "normal", "checkpoint_tso": 449526490974126086, "checkpoint_time": "2024-05-04 15:24:35.945", "error": null }' { "upstream_id": 7365041907288872667, "namespace": "default", "id": "9e089ff6-7d57-41a8-b0dd-91a56724f906", "state": "normal", "checkpoint_tso": 449526490974126086, "checkpoint_time": "2024-05-04 15:24:35.945", "error": null } ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"9e089ff6-7d57-41a8-b0dd-91a56724f906",' '"state":' '"normal",' '"checkpoint_tso":' 449526490974126086, '"checkpoint_time":' '"2024-05-04' '15:24:35.945",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365041907288872667, '"namespace":' '"default",' '"id":' '"9e089ff6-7d57-41a8-b0dd-91a56724f906",' '"state":' '"normal",' '"checkpoint_tso":' 449526490974126086, '"checkpoint_time":' '"2024-05-04' '15:24:35.945",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365041907288872667 table test.finish_mark exists check table exists success check diff successfully wait process cdc.test exit for 1-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:24:37 CST 2024] <<<<<< run test case processor_err_chan success! >>>>>> table row_format.finish_mark not exists for 3-th check, retry later table foreign_key.finish_mark not exists for 7-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:24:38 CST 2024] <<<<<< run test case charset_gbk success! 
>>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 12.38 secs (301112793 bytes/sec) table row_format.finish_mark not exists for 4-th check, retry later [Pipeline] { [Pipeline] cache ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully Changefeed remove successfully. ID: f46ae884-8f65-4d0b-b34e-2384d710985c CheckpointTs: 449526489453953028 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-9724?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_forward http://127.0.0.1:2379 7365041907288872667 449526491812986884 449526490974126086 449526489453953028 table foreign_key.finish_mark not exists for 8-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully Changefeed remove successfully. ID: 9e089ff6-7d57-41a8-b0dd-91a56724f906 CheckpointTs: 449526492075130885 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-9724?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_cleared http://127.0.0.1:2379 7365041907288872667 run task successfully table row_format.finish_mark not exists for 5-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f668140012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:2053, start at 2024-05-04 15:24:38.826007833 +0800 CST m=+5.288120806 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:38.833 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:38.839 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:38.839 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f668140012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:2053, start at 2024-05-04 15:24:38.826007833 +0800 CST m=+5.288120806 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:38.833 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:38.839 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:38.839 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f669800002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:2133, start at 2024-05-04 15:24:38.881239565 +0800 CST m=+5.288040756 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:38.887 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:38.880 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:38.880 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
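The gc_safepoint traces above walk a changefeed through pause, resume, and remove while checking that TiCDC's service GC safepoint in PD tracks it: held or advanced according to the changefeed's checkpoint, and cleared once every changefeed is removed. A sketch of the CLI side of that flow using the ids shown in the log; check_safepoint_forward and check_safepoint_cleared are the harness's own helpers and are not reproduced here:

pd=http://127.0.0.1:2379
cf=f46ae884-8f65-4d0b-b34e-2384d710985c     # changefeed id taken from the log above

cdc cli changefeed pause  --pd="$pd" -c "$cf"    # changefeed query now reports "state": "stopped"
cdc cli changefeed resume --pd="$pd" -c "$cf"    # back to "state": "normal"
cdc cli changefeed remove --pd="$pd" -c "$cf"    # prints "Changefeed remove successfully."
# After the last changefeed is gone, check_safepoint_cleared verifies that no
# TiCDC service GC safepoint remains registered in PD.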
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 2-th time... table foreign_key.finish_mark not exists for 9-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:24:42 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>> table row_format.finish_mark not exists for 6-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f69e08000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:1807, start at 2024-05-04 15:24:42.258711548 +0800 CST m=+5.170815288 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:42.266 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:42.242 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:42.242 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table foreign_key.finish_mark not exists for 10-th check, retry later [Sat May 4 15:24:43 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.36223624.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table row_format.finish_mark not exists for 7-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f69e08000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:1807, start at 2024-05-04 15:24:42.258711548 +0800 CST m=+5.170815288 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:42.266 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:42.242 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:42.242 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f69eac0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:1891, start at 2024-05-04 15:24:42.304665929 +0800 CST m=+5.159532753 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:26:42.310 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:24:42.283 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:14:42.283 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table foreign_key.finish_mark not exists for 11-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf9eab11-b3cb-4524-9a9c-52ab7675e54f {"id":"bf9eab11-b3cb-4524-9a9c-52ab7675e54f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807484} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7192ca bf9eab11-b3cb-4524-9a9c-52ab7675e54f /tidb/cdc/default/default/upstream/7365042004962025867 {"id":7365042004962025867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf9eab11-b3cb-4524-9a9c-52ab7675e54f {"id":"bf9eab11-b3cb-4524-9a9c-52ab7675e54f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807484} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7192ca bf9eab11-b3cb-4524-9a9c-52ab7675e54f /tidb/cdc/default/default/upstream/7365042004962025867 {"id":7365042004962025867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bf9eab11-b3cb-4524-9a9c-52ab7675e54f {"id":"bf9eab11-b3cb-4524-9a9c-52ab7675e54f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807484} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7192ca bf9eab11-b3cb-4524-9a9c-52ab7675e54f /tidb/cdc/default/default/upstream/7365042004962025867 {"id":7365042004962025867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: fa842909-7f21-4387-a6b9-b626cd8eba87 Info: {"upstream_id":7365042004962025867,"namespace":"default","id":"fa842909-7f21-4387-a6b9-b626cd8eba87","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T15:24:47.280171667+08:00","start_ts":449526493069705218,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526493069705218,"checkpoint_ts":449526493069705218,"checkpoint_time":"2024-05-04 15:24:43.939"} [Sat May 4 15:24:47 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 table row_format.finish_mark not exists for 8-th check, retry later [Sat May 4 15:24:47 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.33923394.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table foreign_key.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... 
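The changefeed record above was created against the Kafka sink URI shown in its info blob. A rough sketch of the equivalent CLI call (the plain cdc binary name, the PD address and the explicit changefeed ID are assumptions; the real run used the coverage-instrumented cdc.test binary and let TiCDC generate the ID):

    # Create a changefeed that writes open-protocol messages to the
    # big-message-test topic; sink URI mirrors the one recorded in the log.
    cdc cli changefeed create \
      --pd=http://127.0.0.1:2379 \
      -c=kafka-big-messages-v2 \
      --sink-uri='kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912'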
table row_format.finish_mark not exists for 9-th check, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:24:50 CST 2024] <<<<<< run test case foreign_key success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:24:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e5fe6e8-6def-42fa-9e7b-b83bca38b33b {"id":"5e5fe6e8-6def-42fa-9e7b-b83bca38b33b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807487} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7d7eca 5e5fe6e8-6def-42fa-9e7b-b83bca38b33b /tidb/cdc/default/default/upstream/7365042018753019228 {"id":7365042018753019228,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e5fe6e8-6def-42fa-9e7b-b83bca38b33b {"id":"5e5fe6e8-6def-42fa-9e7b-b83bca38b33b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807487} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7d7eca 5e5fe6e8-6def-42fa-9e7b-b83bca38b33b /tidb/cdc/default/default/upstream/7365042018753019228 {"id":7365042018753019228,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5e5fe6e8-6def-42fa-9e7b-b83bca38b33b {"id":"5e5fe6e8-6def-42fa-9e7b-b83bca38b33b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807487} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427d7d7eca 5e5fe6e8-6def-42fa-9e7b-b83bca38b33b /tidb/cdc/default/default/upstream/7365042018753019228 {"id":7365042018753019228,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 09e057f6-0d74-460d-98cd-b38def6b0ce8 Info: {"upstream_id":7365042018753019228,"namespace":"default","id":"09e057f6-0d74-460d-98cd-b38def6b0ce8","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T15:24:50.796038758+08:00","start_ts":449526494001102849,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526494001102849,"checkpoint_ts":449526494001102849,"checkpoint_time":"2024-05-04 15:24:47.492"} [Sat May 4 15:24:50 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 table row_format.finish_mark not exists for 10-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_manager/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table row_format.finish_mark not exists for 11-th check, retry later table kafka_big_messages.test exists check diff failed 1-th time, retry later table row_format.finish_mark exists check diff successfully =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_reconstruct/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process cdc.test exit for 1-th time... table kafka_big_messages.test not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/ddl_manager Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... check diff failed 2-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:24:56 CST 2024] <<<<<< run test case row_format success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/changefeed_reconstruct Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff successfully table kafka_big_messages.test exists check diff failed 1-th time, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:25:00 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! >>>>>> check diff successfully wait process cdc.test exit for 1-th time... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 19.20 secs (194083941 bytes/sec) [Pipeline] { [Pipeline] cache Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
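The "ERROR 2003 (HY000): Can't connect to MySQL server" lines that follow are the normal retries while a freshly started TiDB is still coming up. A hedged sketch of such a wait loop (port, retry count and sleep interval are assumptions, not values read from the test scripts):

    # Probe the TiDB port until a trivial query succeeds, then report readiness.
    i=0
    until mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
      i=$((i + 1))
      if [ "$i" -ge 60 ]; then
        echo "TiDB did not come up in time" >&2
        exit 1
      fi
      sleep 1
    done
    echo "TiDB is up"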
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 2-th time... start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:25:02 CST 2024] <<<<<< run test case kafka_big_messages success! >>>>>> Starting Upstream TiDB... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 1-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:25:03 CST 2024] <<<<<< run test case kv_client_stream_reconnect success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f81ed4000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:4166, start at 2024-05-04 15:25:06.881074854 +0800 CST m=+5.182308366 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:06.890 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-15:25:06.869 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:06.869 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f81ed4000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:4166, start at 2024-05-04 15:25:06.881074854 +0800 CST m=+5.182308366 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:06.890 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:06.869 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:06.869 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f820540014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j5hwn-r8817, pid:4238, start at 2024-05-04 15:25:07.001627842 +0800 CST m=+5.249856420 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:07.008 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:06.965 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:06.965 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f84184000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:4407, start at 2024-05-04 15:25:09.099657784 +0800 CST m=+5.217291893 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:09.107 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:09.089 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:09.089 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f84184000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:4407, start at 2024-05-04 15:25:09.099657784 +0800 CST m=+5.217291893 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:09.107 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:09.089 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:09.089 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f842fc0005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:4491, start at 2024-05-04 15:25:09.186463931 +0800 CST m=+5.250316138 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:09.192 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:09.183 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:09.183 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
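Each component in these cases writes its logs under /tmp/tidb_cdc_test/<case>/; when a "Verifying ... TiFlash is started..." step stalls, the quickest view is to tail the files named in the "Logging trace to"/"Logging errors to" lines, for example for the ddl_manager case shown above:

    # Follow the TiFlash server and error logs of the ddl_manager case
    # (paths taken verbatim from the log lines above).
    tail -f /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log \
            /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log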
Logging trace to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... [Sat May 4 15:25:12 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.55785580.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 15:25:12 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.58925894.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver1 --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9f201e51-aeb4-41e5-a6bb-9797cba9ed0a {"id":"9f201e51-aeb4-41e5-a6bb-9797cba9ed0a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd7fd1 9f201e51-aeb4-41e5-a6bb-9797cba9ed0a /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9f201e51-aeb4-41e5-a6bb-9797cba9ed0a {"id":"9f201e51-aeb4-41e5-a6bb-9797cba9ed0a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd7fd1 9f201e51-aeb4-41e5-a6bb-9797cba9ed0a /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9f201e51-aeb4-41e5-a6bb-9797cba9ed0a {"id":"9f201e51-aeb4-41e5-a6bb-9797cba9ed0a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd7fd1 9f201e51-aeb4-41e5-a6bb-9797cba9ed0a /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.cli.5640.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-manager =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
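The curl exchange above is the CDC server readiness probe. A minimal sketch of the same loop, with the endpoint, basic-auth credentials and the "failed to get info:" / "etcd info" markers taken from the trace, and the 50-iteration cap loosely mirroring its '[ $i -eq 50 ]' guard:

    # Poll the CDC server's /debug/info endpoint until it reports etcd info.
    i=0
    while [ "$i" -le 50 ]; do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
      if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server reported an error" >&2
        exit 1
      fi
      echo "$res" | grep -q 'etcd info' && break
      i=$((i + 1))
      sleep 3
    done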
+ (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c78e90dc-e50f-48eb-8c49-c83ff7c81911 {"id":"c78e90dc-e50f-48eb-8c49-c83ff7c81911","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de438c3 c78e90dc-e50f-48eb-8c49-c83ff7c81911 /tidb/cdc/default/default/upstream/7365042132377157249 {"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c78e90dc-e50f-48eb-8c49-c83ff7c81911 {"id":"c78e90dc-e50f-48eb-8c49-c83ff7c81911","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de438c3 c78e90dc-e50f-48eb-8c49-c83ff7c81911 /tidb/cdc/default/default/upstream/7365042132377157249 {"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c78e90dc-e50f-48eb-8c49-c83ff7c81911 {"id":"c78e90dc-e50f-48eb-8c49-c83ff7c81911","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807512} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de438c3 c78e90dc-e50f-48eb-8c49-c83ff7c81911 /tidb/cdc/default/default/upstream/7365042132377157249 {"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:25:15 CST 2024] <<<<<< START kafka consumer in changefeed_reconstruct case >>>>>> ***************** properties ***************** "mysql.user"="root" "recordcount"="50" "threadcount"="4" "readproportion"="0" "updateproportion"="0" "operationcount"="0" "scanproportion"="0" "mysql.db"="changefeed_reconstruct" "requestdistribution"="uniform" "readallfields"="true" "mysql.port"="4000" "dotransactions"="false" "insertproportion"="0" 
"workload"="core" "mysql.host"="127.0.0.1" ********************************************** Run finished, takes 16.483818ms INSERT - Takes(s): 0.0, Count: 48, OPS: 3766.9, Avg(us): 1295, Min(us): 874, Max(us): 3714, 95th(us): 4000, 99th(us): 4000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later Create changefeed successfully! ID: ddl-manager Info: {"upstream_id":7365042122591326955,"namespace":"default","id":"ddl-manager","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:15.56919285+08:00","start_ts":449526501321736197,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526501321736197,"checkpoint_ts":449526501321736197,"checkpoint_time":"2024-05-04 15:25:15.418"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f88b64001a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:4890, start at 2024-05-04 15:25:13.860534815 +0800 CST m=+5.246876766 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:13.867 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:13.867 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:13.867 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f88b64001a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:4890, start at 2024-05-04 15:25:13.860534815 +0800 CST m=+5.246876766 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:13.867 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:13.867 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:13.867 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f88cdc0015 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-dwfjx-p9mf6, pid:4972, start at 2024-05-04 15:25:13.953210751 +0800 CST m=+5.284874318 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:13.960 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:13.961 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:13.961 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x [Sat May 4 15:25:17 CST 2024] <<<<<< START kafka consumer in ddl_manager case >>>>>> table changefeed_reconstruct.usertable not exists for 2-th check, retry later [Sat May 4 15:25:18 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user 
ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + GO_FAILPOINTS= + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.63146316.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc/run.sh using Sink-Type: kafka... <<================= start tidb cluster in /tmp/tidb_cdc_test/kafka_compression Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f8e3d00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:4169, start at 2024-05-04 15:25:19.50693422 +0800 CST m=+5.112083864 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:19.514 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:19.476 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:19.476 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f8e3d00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:4169, start at 2024-05-04 15:25:19.50693422 +0800 CST m=+5.112083864 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:19.514 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:19.476 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:19.476 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f8e4800015 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-cxnnw-bkcrr, pid:4255, start at 2024-05-04 15:25:19.556317832 +0800 CST m=+5.108447540 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:19.564 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:19.570 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:19.570 +0800 All versions after safe point can be accessed. (DO NOT EDIT) table changefeed_reconstruct.usertable exists check diff successfully Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process 5897 exit for 1-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8e06fcc7-6484-40c5-9fd7-924591ff69ed {"id":"8e06fcc7-6484-40c5-9fd7-924591ff69ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807519} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427df6b0cd 8e06fcc7-6484-40c5-9fd7-924591ff69ed /tidb/cdc/default/default/upstream/7365042155307332958 {"id":7365042155307332958,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8e06fcc7-6484-40c5-9fd7-924591ff69ed {"id":"8e06fcc7-6484-40c5-9fd7-924591ff69ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807519} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427df6b0cd 8e06fcc7-6484-40c5-9fd7-924591ff69ed /tidb/cdc/default/default/upstream/7365042155307332958 {"id":7365042155307332958,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8e06fcc7-6484-40c5-9fd7-924591ff69ed {"id":"8e06fcc7-6484-40c5-9fd7-924591ff69ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807519} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427df6b0cd 8e06fcc7-6484-40c5-9fd7-924591ff69ed /tidb/cdc/default/default/upstream/7365042155307332958 {"id":7365042155307332958,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Verifying downstream PD is started... [Sat May 4 15:25:22 CST 2024] <<<<<< START kafka consumer in changefeed_pause_resume case >>>>>> wait process 5897 exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5897) - No such process wait process 5897 exit for 3-th time... 
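[editor's note] The trace above shows the pattern the harness uses to decide a cdc server is up: poll the capture's /debug/info endpoint with basic auth until the response contains "etcd info", retrying up to 50 times with a 3-second pause. The sketch below condenses that loop; the endpoint, credentials, grep markers, and retry budget are taken from the trace, while the function name and messages are illustrative only.

```bash
#!/usr/bin/env bash
# Illustrative readiness poll modeled on the trace above: curl the CDC
# /debug/info endpoint until the response mentions "etcd info", giving up
# after 50 attempts spaced 3 seconds apart (the budget used in the log).
wait_cdc_ready() {
    local addr=${1:-127.0.0.1:8300}
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>&1)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server returned an error, retrying..."
        elif echo "$res" | grep -q 'etcd info'; then
            echo "cdc server on ${addr} is ready"
            return 0
        fi
        if (( i == 50 )); then
            echo "cdc server on ${addr} did not become ready" >&2
            return 1
        fi
        sleep 3
    done
}
```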
process 5897 already exit check_no_capture http://127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully [Sat May 4 15:25:23 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.61686170.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver2 --cluster-id default --addr 127.0.0.1:8300 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 The 1 times to try to start tidb cluster... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5640.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table changefeed_pause_resume.t1 not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f92be40009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:4171, start at 2024-05-04 15:25:24.099440448 +0800 CST m=+5.185113590 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:24.106 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-15:25:24.089 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:24.089 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449526503551795201 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526503551795201 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 15:25:25 CST 2024] <<<<<< START cdc server in ddl_puller_lag case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.56865688.out server --log-file /tmp/tidb_cdc_test/ddl_puller_lag/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_puller_lag/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:26 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {UpstreamID:7365042132377157249 Namespace:default ID:1c5b6c77-4fe1-43d9-9f22-84c2754c832a SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.494059609 +0800 CST StartTs:449526501301288965 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00321b680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501340610561} {CheckpointTs:449526502769295362 MinTableBarrierTs:449526504080015364 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449526502769295362, checkpointTs: 449526502769295362, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e02ca918-a116-49e0-9a7c-d66970a06b3b {"id":"e02ca918-a116-49e0-9a7c-d66970a06b3b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807523} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de43967 e02ca918-a116-49e0-9a7c-d66970a06b3b /tidb/cdc/default/default/changefeed/info/1c5b6c77-4fe1-43d9-9f22-84c2754c832a 
{"upstream-id":7365042132377157249,"namespace":"default","changefeed-id":"1c5b6c77-4fe1-43d9-9f22-84c2754c832a","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.494059609+08:00","start-ts":449526501301288965,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501340610561} /tidb/cdc/default/default/changefeed/status/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":449526502769295362,"min-table-barrier-ts":449526504080015364,"admin-job-type":0} /tidb/cdc/default/default/task/position/e02ca918-a116-49e0-9a7c-d66970a06b3b/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042132377157249 {"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {UpstreamID:7365042132377157249 Namespace:default ID:1c5b6c77-4fe1-43d9-9f22-84c2754c832a SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.494059609 +0800 CST StartTs:449526501301288965 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00321b680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501340610561} {CheckpointTs:449526502769295362 MinTableBarrierTs:449526504080015364 AdminJobType:noop} span: 
{table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449526502769295362, checkpointTs: 449526502769295362, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e02ca918-a116-49e0-9a7c-d66970a06b3b {"id":"e02ca918-a116-49e0-9a7c-d66970a06b3b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807523} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de43967 e02ca918-a116-49e0-9a7c-d66970a06b3b /tidb/cdc/default/default/changefeed/info/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"upstream-id":7365042132377157249,"namespace":"default","changefeed-id":"1c5b6c77-4fe1-43d9-9f22-84c2754c832a","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.494059609+08:00","start-ts":449526501301288965,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501340610561} /tidb/cdc/default/default/changefeed/status/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":449526502769295362,"min-table-barrier-ts":449526504080015364,"admin-job-type":0} /tidb/cdc/default/default/task/position/e02ca918-a116-49e0-9a7c-d66970a06b3b/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042132377157249 
{"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {UpstreamID:7365042132377157249 Namespace:default ID:1c5b6c77-4fe1-43d9-9f22-84c2754c832a SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.494059609 +0800 CST StartTs:449526501301288965 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00321b680 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501340610561} {CheckpointTs:449526502769295362 MinTableBarrierTs:449526504080015364 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449526502769295362, checkpointTs: 449526502769295362, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e02ca918-a116-49e0-9a7c-d66970a06b3b {"id":"e02ca918-a116-49e0-9a7c-d66970a06b3b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807523} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427de43967 e02ca918-a116-49e0-9a7c-d66970a06b3b /tidb/cdc/default/default/changefeed/info/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"upstream-id":7365042132377157249,"namespace":"default","changefeed-id":"1c5b6c77-4fe1-43d9-9f22-84c2754c832a","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31067?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.494059609+08:00","start-ts":449526501301288965,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-che
ck-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501340610561} /tidb/cdc/default/default/changefeed/status/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":449526502769295362,"min-table-barrier-ts":449526504080015364,"admin-job-type":0} /tidb/cdc/default/default/task/position/e02ca918-a116-49e0-9a7c-d66970a06b3b/1c5b6c77-4fe1-43d9-9f22-84c2754c832a {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042132377157249 {"id":7365042132377157249,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1 | grep id table changefeed_pause_resume.t1 exists table changefeed_pause_resume.t2 exists table changefeed_pause_resume.t3 not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f92be40009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:4171, start at 2024-05-04 15:25:24.099440448 +0800 CST m=+5.185113590 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:24.106 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:24.089 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:24.089 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f92bcc0015 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:4255, start at 2024-05-04 15:25:24.121761032 +0800 CST m=+5.154333071 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:24.127 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:24.133 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:24.133 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) "id": "e02ca918-a116-49e0-9a7c-d66970a06b3b", "cluster-id": "default" run task successfully table changefeed_pause_resume.t3 exists ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:28 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/708119e4-64e5-4650-b961-67dc01580f18 {"id":"708119e4-64e5-4650-b961-67dc01580f18","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807525} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e0f23f0 708119e4-64e5-4650-b961-67dc01580f18 /tidb/cdc/default/default/upstream/7365042176124442128 {"id":7365042176124442128,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/708119e4-64e5-4650-b961-67dc01580f18 {"id":"708119e4-64e5-4650-b961-67dc01580f18","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807525} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e0f23f0 708119e4-64e5-4650-b961-67dc01580f18 /tidb/cdc/default/default/upstream/7365042176124442128 {"id":7365042176124442128,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/708119e4-64e5-4650-b961-67dc01580f18 {"id":"708119e4-64e5-4650-b961-67dc01580f18","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807525} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e0f23f0 708119e4-64e5-4650-b961-67dc01580f18 /tidb/cdc/default/default/upstream/7365042176124442128 {"id":7365042176124442128,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5754.out cli changefeed create --start-ts=449526503551795201 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-14175?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760' capture_id: e02ca918-a116-49e0-9a7c-d66970a06b3b check_processor_table_count http://127.0.0.1:2379 1c5b6c77-4fe1-43d9-9f22-84c2754c832a e02ca918-a116-49e0-9a7c-d66970a06b3b 1 Create changefeed successfully! 
ID: 359dc11b-28b6-4019-8c0e-1bf0eba6e636 Info: {"upstream_id":7365042176124442128,"namespace":"default","id":"359dc11b-28b6-4019-8c0e-1bf0eba6e636","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-14175?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=ddl_puller_lag\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:29.138638979+08:00","start_ts":449526503551795201,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526503551795201,"checkpoint_ts":449526503551795201,"checkpoint_time":"2024-05-04 15:25:23.925"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f95d24000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:6071, start at 2024-05-04 15:25:27.258401005 +0800 CST m=+6.274860794 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:27.268 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:27.241 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:27.241 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. 
Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f95d24000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:6071, start at 2024-05-04 15:25:27.258401005 +0800 CST m=+6.274860794 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:27.268 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:27.241 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:27.241 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f95ebc0007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:6157, start at 2024-05-04 15:25:27.350472232 +0800 CST m=+6.314306886 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:27.357 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:27.343 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:27.343 +0800 All versions after safe point can be accessed. (DO NOT EDIT) run task successfully Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
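[editor's note] The "Create changefeed successfully!" block above comes from the cdc CLI being invoked with a Kafka sink URI, as captured in the `cli changefeed create` trace a few lines earlier. Below is a hedged, stand-alone rendering of that invocation; the start-ts and topic suffix are placeholders (the real values come from a preceding `cli tso query` and a randomized test ID), and the query parameters simply mirror the ones visible in the log.

```bash
# Illustrative form of the changefeed creation seen in the log above.
# START_TS would normally come from `cdc cli tso query --pd=...`; the topic
# suffix is randomized by the test harness, so both values are placeholders.
PD_ADDR="http://127.0.0.1:2379"
START_TS=449526503551795201             # placeholder; value copied from the logged run
TOPIC="ticdc-ddl-puller-lag-test-14175" # placeholder topic name

cdc cli changefeed create \
    --pd="$PD_ADDR" \
    --start-ts="$START_TS" \
    --sink-uri="kafka+ssl://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760"
```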
Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5699.out cli tso query --pd=http://127.0.0.1:2379 check_processor_table_count http://127.0.0.1:2379 1c5b6c77-4fe1-43d9-9f22-84c2754c832a e02ca918-a116-49e0-9a7c-d66970a06b3b 0 start tidb cluster in /tmp/tidb_cdc_test/cdc Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table count 1 does equal to expected count 0 run task failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 15:25:30 CST 2024] <<<<<< START kafka consumer in ddl_puller_lag case >>>>>> + set +x + tso='449526505023733761 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526505023733761 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 15:25:30 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.57395741.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 15:25:31 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.75467548.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check_processor_table_count http://127.0.0.1:2379 1c5b6c77-4fe1-43d9-9f22-84c2754c832a e02ca918-a116-49e0-9a7c-d66970a06b3b 0 run task successfully ***************** properties ***************** "workload"="core" "readproportion"="0" "mysql.host"="127.0.0.1" "scanproportion"="0" "operationcount"="0" "insertproportion"="0" "mysql.user"="root" "mysql.port"="4000" "mysql.db"="changefeed_reconstruct" "threadcount"="4" "updateproportion"="0" "dotransactions"="false" "recordcount"="50" "readallfields"="true" "requestdistribution"="uniform" ********************************************** Run finished, takes 18.256533ms INSERT - Takes(s): 0.0, Count: 48, OPS: 3434.0, Avg(us): 1450, Min(us): 961, Max(us): 4561, 95th(us): 5000, 99th(us): 5000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f9a0400004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:5787, start at 2024-05-04 15:25:31.540152245 +0800 CST m=+5.072610792 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:31.546 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:31.536 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:31.536 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f9a0400004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:5787, start at 2024-05-04 15:25:31.540152245 +0800 CST m=+5.072610792 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:31.546 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:31.536 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:31.536 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09f9a1e00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:5870, start at 2024-05-04 15:25:31.680176419 +0800 CST m=+5.157829656 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:31.687 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:31.689 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:31.689 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
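For reference, the VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows printed above are TiDB's bootstrap and GC metadata from the mysql.tidb table, and the ERROR 2003 lines are the probe failing before the server is listening. A minimal sketch of the kind of readiness probe that produces this output, assuming the standard mysql client and the host/port used elsewhere in this log; the retry budget and sleep are illustrative assumptions, not taken from the test scripts:

    # Hypothetical readiness probe (sketch only): poll until TiDB answers and report its bootstrap metadata.
    # 127.0.0.1:4000 matches the upstream TiDB used in this run; the 60-try budget is an assumption.
    for i in $(seq 1 60); do
        if mysql -h 127.0.0.1 -P 4000 -u root -e \
            'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' 2>/dev/null; then
            echo "TiDB is up after $i tries"
            break
        fi
        echo "TiDB not ready yet, retrying..."
        sleep 1
    done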
Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:34 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/63373347-7c98-40c2-85ec-f80bce5b2aad {"id":"63373347-7c98-40c2-85ec-f80bce5b2aad","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e20c3d7 63373347-7c98-40c2-85ec-f80bce5b2aad /tidb/cdc/default/default/upstream/7365042195775048452 {"id":7365042195775048452,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/63373347-7c98-40c2-85ec-f80bce5b2aad {"id":"63373347-7c98-40c2-85ec-f80bce5b2aad","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e20c3d7 63373347-7c98-40c2-85ec-f80bce5b2aad /tidb/cdc/default/default/upstream/7365042195775048452 {"id":7365042195775048452,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/63373347-7c98-40c2-85ec-f80bce5b2aad {"id":"63373347-7c98-40c2-85ec-f80bce5b2aad","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e20c3d7 63373347-7c98-40c2-85ec-f80bce5b2aad /tidb/cdc/default/default/upstream/7365042195775048452 {"id":7365042195775048452,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5802.out cli changefeed create --start-ts=449526505023733761 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-29081?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' table changefeed_reconstruct.usertable exists Create changefeed successfully! 
ID: 29fe90a0-5213-4909-b03e-04f4ab9bd451 Info: {"upstream_id":7365042195775048452,"namespace":"default","id":"29fe90a0-5213-4909-b03e-04f4ab9bd451","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-29081?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:34.555390312+08:00","start_ts":449526505023733761,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526505023733761,"checkpoint_ts":449526505023733761,"checkpoint_time":"2024-05-04 15:25:29.540"} PASS + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > check diff failed 2-th time, retry later check diff failed 1-th time, retry later coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
< HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:34 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/05fe91f3-b0b3-48ed-a12d-737093b19be6 {"id":"05fe91f3-b0b3-48ed-a12d-737093b19be6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e28f9cc 05fe91f3-b0b3-48ed-a12d-737093b19be6 /tidb/cdc/default/default/upstream/7365042205321039533 {"id":7365042205321039533,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/05fe91f3-b0b3-48ed-a12d-737093b19be6 {"id":"05fe91f3-b0b3-48ed-a12d-737093b19be6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e28f9cc 05fe91f3-b0b3-48ed-a12d-737093b19be6 /tidb/cdc/default/default/upstream/7365042205321039533 {"id":7365042205321039533,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/05fe91f3-b0b3-48ed-a12d-737093b19be6 {"id":"05fe91f3-b0b3-48ed-a12d-737093b19be6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807531} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e28f9cc 05fe91f3-b0b3-48ed-a12d-737093b19be6 /tidb/cdc/default/default/upstream/7365042205321039533 {"id":7365042205321039533,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:25:34 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.72027204.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! ID: test-normal Info: {"upstream_id":7365042205321039533,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-17525?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:34.904509181+08:00","start_ts":449526505548546049,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526505548546049,"checkpoint_ts":449526505548546049,"checkpoint_time":"2024-05-04 15:25:31.542"} Create changefeed successfully! 
ID: test-error-1 Info: {"upstream_id":7365042205321039533,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-5067?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:35.120833415+08:00","start_ts":449526505548546049,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526505548546049,"checkpoint_ts":449526505548546049,"checkpoint_time":"2024-05-04 15:25:31.542"} wait process 5583 exit for 1-th time... wait process 5583 exit for 2-th time... wait process 5583 exit for 3-th time... wait process 5583 exit for 4-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! 
ID: test-error-2 Info: {"upstream_id":7365042205321039533,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-11303?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:35.350890799+08:00","start_ts":449526505548546049,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526505548546049,"checkpoint_ts":449526505548546049,"checkpoint_time":"2024-05-04 15:25:31.542"} [Sat May 4 15:25:35 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 15:25:35 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 15:25:35 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5583) - No such process wait process 5583 exit for 5-th time... process 5583 already exit [Sat May 4 15:25:35 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.57585760.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 15:25:36 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>> check diff failed 3-th time, retry later table drop_tables.c not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:37 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4b81205b-f68a-48b4-9dff-d4f5d5b04947 {"id":"4b81205b-f68a-48b4-9dff-d4f5d5b04947","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e3e25cc 4b81205b-f68a-48b4-9dff-d4f5d5b04947 /tidb/cdc/default/default/upstream/7365042221777659985 {"id":7365042221777659985,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4b81205b-f68a-48b4-9dff-d4f5d5b04947 {"id":"4b81205b-f68a-48b4-9dff-d4f5d5b04947","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e3e25cc 4b81205b-f68a-48b4-9dff-d4f5d5b04947 /tidb/cdc/default/default/upstream/7365042221777659985 {"id":7365042221777659985,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4b81205b-f68a-48b4-9dff-d4f5d5b04947 {"id":"4b81205b-f68a-48b4-9dff-d4f5d5b04947","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e3e25cc 4b81205b-f68a-48b4-9dff-d4f5d5b04947 /tidb/cdc/default/default/upstream/7365042221777659985 
{"id":7365042221777659985,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7264.out cli tso query --pd=http://127.0.0.1:2379 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:25:38 CST 2024] <<<<<< run test case changefeed_reconstruct success! >>>>>> table multi_tables_ddl_test.t55 not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > table drop_tables.c not exists for 2-th check, retry later check diff failed 4-th time, retry later + set +x + tso='449526507290492932 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526507290492932 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7303.out cli changefeed create --start-ts=449526507290492932 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! 
ID: gzip Info: {"upstream_id":7365042221777659985,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-05-04T15:25:40.119798695+08:00","start_ts":449526507290492932,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526507290492932,"checkpoint_ts":449526507290492932,"checkpoint_time":"2024-05-04 15:25:38.187"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
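The gzip changefeed above is created with the same cli pattern used throughout this run: query a TSO for --start-ts, then pass a kafka:// sink URI whose query string selects protocol, kafka-version and compression. A condensed sketch of that invocation, reconstructed from the echoed commands (only values that appear in the trace are real; the -test.coverprofile flag used by the instrumented cdc.test binary is omitted here for brevity):

    # Sketch of the changefeed-create pattern traced above (kafka_compression, gzip case).
    tso_output=$(cdc.test cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')   # unquoted echo flattens the output; $1 is the TSO
    cdc.test cli changefeed create --start-ts="$start_ts" \
        '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' \
        -c gzip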
table multi_tables_ddl_test.t55 not exists for 2-th check, retry later check diff successfully < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:40 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e {"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":
""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502016417807,"min-table-barrier-ts":449526502016417807,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e {"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502016417807,"min-table-barrier-ts":449526502016417807,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e 
{"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502016417807,"min-table-barrier-ts":449526502016417807,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Sat May 4 15:25:40 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + 
GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.58155817.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > table drop_tables.c not exists for 3-th check, retry later + set +x [Sat May 4 15:25:41 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 37.20 secs (100198011 bytes/sec) [Pipeline] { [Pipeline] cache table multi_tables_ddl_test.t55 not exists for 3-th check, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 15:25:40.079 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.115 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.243 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.252 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:41.214 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:41.224 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 15:25:40.079 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.115 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.243 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:40.252 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:41.214 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 15:25:41.224 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]") table test.gzip_finish_mark not exists for 1-th check, retry later table drop_tables.c not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fa49ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:4575, start at 2024-05-04 15:25:42.410646599 +0800 CST m=+7.273770005 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:42.419 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:42.379 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:42.379 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:44 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact table test.gzip_finish_mark not exists for 2-th check, retry later + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e {"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502147489816,"min-table-barrier-ts":449526502147489816,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e 
{"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502147489816,"min-table-barrier-ts":449526502147489816,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 {"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365042122591326955 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 15:25:15.56919285 +0800 CST 
StartTs:449526501321736197 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001781200 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-71-g41fc0de5f Epoch:449526501361057793} {CheckpointTs:449526502016417807 MinTableBarrierTs:449526502016417807 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5930f970-abea-4b68-91b6-68b9ea65e27e {"id":"5930f970-abea-4b68-91b6-68b9ea65e27e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807535} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ddd82b8 5930f970-abea-4b68-91b6-68b9ea65e27e /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365042122591326955,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-507?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T15:25:15.56919285+08:00","start-ts":449526501321736197,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-71-g41fc0de5f","epoch":449526501361057793} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449526502147489816,"min-table-barrier-ts":449526502147489816,"admin-job-type":0} /tidb/cdc/default/default/task/position/5930f970-abea-4b68-91b6-68b9ea65e27e/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365042122591326955 
{"id":7365042122591326955,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table ddl_manager.finish_mark not exists for 1-th check, retry later table multi_tables_ddl_test.t55 not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fa49ac0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:4575, start at 2024-05-04 15:25:42.410646599 +0800 CST m=+7.273770005 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:42.419 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:42.379 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:42.379 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fa29900016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-9791l-h7cbr, pid:4659, start at 2024-05-04 15:25:40.34826196 +0800 CST m=+5.155562310 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:27:40.354 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:25:40.324 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:15:40.324 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cdc/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table drop_tables.c not exists for 5-th check, retry later table test.gzip_finish_mark exists table ddl_manager.finish_mark not exists for 2-th check, retry later check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7441.out cli changefeed pause -c gzip table multi_tables_ddl_test.t55 not exists for 5-th check, retry later PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff failed 1-th time, retry later [Sat May 4 15:25:47 CST 2024] <<<<<< START cdc server in cdc case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.60096011.out server --log-file /tmp/tidb_cdc_test/cdc/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table drop_tables.c exists check diff successfully table ddl_manager.finish_mark not exists for 3-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7474.out cli changefeed remove -c gzip table multi_tables_ddl_test.t55 exists table multi_tables_ddl_test.t66 exists table multi_tables_ddl_test.t7 exists table multi_tables_ddl_test.t88 exists table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 1-th time... Changefeed remove successfully. ID: gzip CheckpointTs: 449526509466288145 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff failed 2-th time, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:25:49 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>> table multi_tables_ddl_test.finish_mark exists check table exists success + endpoints=http://127.0.0.1:2379 + changefeed_id=test-normal + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7511.out cli tso query --pd=http://127.0.0.1:2379 + info='{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449526507344232476, "checkpoint_time": "2024-05-04 15:25:38.392", "error": null }' + echo '{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449526507344232476, "checkpoint_time": "2024-05-04 15:25:38.392", "error": null }' { "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449526507344232476, "checkpoint_time": "2024-05-04 15:25:38.392", "error": null } ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449526507344232476, '"checkpoint_time":' '"2024-05-04' '15:25:38.392",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449526507344232476, '"checkpoint_time":' '"2024-05-04' '15:25:38.392",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-1 + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:25:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a30a3de-6280-47f2-999a-bf370800d095 {"id":"6a30a3de-6280-47f2-999a-bf370800d095","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e5e42fa 6a30a3de-6280-47f2-999a-bf370800d095 /tidb/cdc/default/default/upstream/7365042269312556223 {"id":7365042269312556223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a30a3de-6280-47f2-999a-bf370800d095 {"id":"6a30a3de-6280-47f2-999a-bf370800d095","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e5e42fa 6a30a3de-6280-47f2-999a-bf370800d095 /tidb/cdc/default/default/upstream/7365042269312556223 {"id":7365042269312556223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a30a3de-6280-47f2-999a-bf370800d095 {"id":"6a30a3de-6280-47f2-999a-bf370800d095","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427e5e42fa 6a30a3de-6280-47f2-999a-bf370800d095 /tidb/cdc/default/default/upstream/7365042269312556223 {"id":7365042269312556223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.cli.6066.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cdc-test-19735?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/cdc/pulsar_test.toml Create changefeed successfully! 
ID: a561d101-4cb8-4d64-92be-e7d13fb403bb Info: {"upstream_id":7365042269312556223,"namespace":"default","id":"a561d101-4cb8-4d64-92be-e7d13fb403bb","sink_uri":"kafka://127.0.0.1:9092/ticdc-cdc-test-19735?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:25:51.051679705+08:00","start_ts":449526510630993922,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526510630993922,"checkpoint_ts":449526510630993922,"checkpoint_time":"2024-05-04 15:25:50.930"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 4-th check, retry later + info='{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449526510398210053, "checkpoint_time": "2024-05-04 15:25:50.042", "error": null }' + echo '{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449526510398210053, "checkpoint_time": "2024-05-04 15:25:50.042", "error": null }' { "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449526510398210053, "checkpoint_time": "2024-05-04 15:25:50.042", "error": null } ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449526510398210053, '"checkpoint_time":' '"2024-05-04' '15:25:50.042",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449526510398210053, '"checkpoint_time":' '"2024-05-04' '15:25:50.042",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-2 + expected_state=failed + error_msg=ErrSyncRenameTableFailed + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s check diff failed 3-th time, retry later + info='{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449526506859266085, "checkpoint_time": "2024-05-04 15:25:36.542", "error": { "time": "2024-05-04T15:25:39.088602126+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' + echo '{ "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449526506859266085, "checkpoint_time": "2024-05-04 15:25:36.542", "error": { "time": "2024-05-04T15:25:39.088602126+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' { "upstream_id": 7365042205321039533, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449526506859266085, "checkpoint_time": "2024-05-04 15:25:36.542", "error": { "time": "2024-05-04T15:25:39.088602126+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449526506859266085, '"checkpoint_time":' '"2024-05-04' '15:25:36.542",' '"error":' '{' '"time":' '"2024-05-04T15:25:39.088602126+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + state=failed + [[ ! 
failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365042205321039533, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449526506859266085, '"checkpoint_time":' '"2024-05-04' '15:25:36.542",' '"error":' '{' '"time":' '"2024-05-04T15:25:39.088602126+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.' + [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]] check diff successfully wait process cdc.test exit for 1-th time... + set +x [Sat May 4 15:25:52 CST 2024] <<<<<< START kafka consumer in cdc case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 wait process cdc.test exit for 2-th time... + set +x + tso='449526510672150531 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526510672150531 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7554.out cli changefeed create --start-ts=449526510672150531 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda table ddl_manager.finish_mark not exists for 5-th check, retry later wait process cdc.test exit for 3-th time... Create changefeed successfully! 
ID: snappy =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_capture/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Info: {"upstream_id":7365042221777659985,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-05-04T15:25:52.985621367+08:00","start_ts":449526510672150531,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526510672150531,"checkpoint_ts":449526510672150531,"checkpoint_time":"2024-05-04 15:25:51.087"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... go: downloading golang.org/x/text v0.14.0 cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 15:25:53 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! 
>>>>>> check diff failed 4-th time, retry later + set +x [Sat May 4 15:25:54 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> table ddl_manager.finish_mark not exists for 6-th check, retry later go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/google/uuid v1.6.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/stretchr/testify v1.9.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading google.golang.org/api v0.170.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/ngaut/sync2 
v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/robfig/cron v1.2.0 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading 
github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 15:25:52.930 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:52.981 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:53.066 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:53.076 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:54.064 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:54.073 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 15:25:52.930 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:52.981 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:53.066 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:53.076 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:54.064 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 15:25:54.073 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]") table test.snappy_finish_mark not exists for 1-th check, retry later go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 check diff failed 5-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_capture Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
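The "run.sh: line 22: [[: ... syntax error: operand expected" messages above show what happens when multi-line grep output is interpolated as an arithmetic operand inside [[ ... ]]. A hedged sketch of a count-based check that avoids the problem; the log path, positional arguments, and threshold are illustrative assumptions, not the test's real values:

#!/usr/bin/env bash
# Count compression-algorithm messages with grep -c, which always prints a
# single number, instead of splicing raw log lines into a [[ ... ]]
# arithmetic test. The log path and threshold here are illustrative.
set -u
algorithm=${1:-snappy}
log_file=${2:-/tmp/tidb_cdc_test/kafka_compression/cdc.log}
count=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" "$log_file" || true)
count=${count:-0}
if [[ "$count" -lt 1 ]]; then
    echo "no '${algorithm}' compression message found in ${log_file}" >&2
    exit 1
fi
echo "found ${count} '${algorithm}' compression message(s)"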
table ddl_manager.finish_mark not exists for 7-th check, retry later table test.snappy_finish_mark not exists for 2-th check, retry later check diff successfully go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 8-th check, retry later go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd table test.snappy_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7682.out cli changefeed pause -c snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 9-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7720.out cli changefeed remove -c snappy Changefeed remove successfully. ID: snappy CheckpointTs: 449526511681404959 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
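The changefeed state checks traced above parse `cdc cli changefeed query` output with jq and compare the state and error message against expected values. A minimal sketch of that pattern as a standalone script; the positional defaults and argument handling are assumptions added for illustration:

#!/usr/bin/env bash
# Query a changefeed and assert on its state and error message, following
# the jq-based checks traced above. Positional defaults are illustrative.
set -eu
endpoints=${1:-http://127.0.0.1:2379}
changefeed_id=${2:?changefeed id required}
expected_state=${3:-normal}
error_msg=${4:-null}
info=$(cdc cli changefeed query --pd="${endpoints}" -c "${changefeed_id}" -s)
state=$(echo "${info}" | jq -r .state)
message=$(echo "${info}" | jq -r .error.message)
if [[ "${state}" != "${expected_state}" ]]; then
    echo "changefeed ${changefeed_id}: state ${state}, expected ${expected_state}" >&2
    exit 1
fi
if [[ ! "${message}" =~ ${error_msg} ]]; then
    echo "changefeed ${changefeed_id}: error '${message}' does not match '${error_msg}'" >&2
    exit 1
fi
echo "changefeed ${changefeed_id} matches state=${expected_state}, error=${error_msg}"

Invoked, for example, as `./check_state.sh http://127.0.0.1:2379 test-error-2 failed ErrSyncRenameTableFailed`, mirroring the failed-changefeed check above (the script name is hypothetical).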
check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 10-th check, retry later + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7756.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 2-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table ddl_manager.finish_mark not exists for 11-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449526514040700930 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526514040700930 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7794.out cli changefeed create --start-ts=449526514040700930 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4 Create changefeed successfully! ID: lz4 Info: {"upstream_id":7365042221777659985,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-05-04T15:26:05.822979417+08:00","start_ts":449526514040700930,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526514040700930,"checkpoint_ts":449526514040700930,"checkpoint_time":"2024-05-04 15:26:03.937"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
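The curl loops traced earlier (and again further below) poll the cdc server's /debug/info endpoint with basic auth until the response contains "etcd info". A sketch of that readiness probe; the address argument and exit codes are written here as illustrative defaults:

#!/usr/bin/env bash
# Poll the cdc server's /debug/info endpoint (basic auth ticdc:ticdc_secret,
# as in the traces above) until the response contains "etcd info".
# The 50-iteration budget and 3-second sleep mirror the traces, but the
# address handling is an illustrative assumption.
set -u
addr=${1:-127.0.0.1:8300}
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>/dev/null || true)
    if echo "${res}" | grep -q 'failed to get info:'; then
        echo "cdc server ${addr} returned an error, retry later"
    elif echo "${res}" | grep -q 'etcd info'; then
        echo "cdc server ${addr} is ready"
        exit 0
    fi
    sleep 3
done
echo "cdc server ${addr} did not become ready" >&2
exit 1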
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff failed 3-th time, retry later table ddl_manager.finish_mark not exists for 12-th check, retry later + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fbbb740005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:7558, start at 2024-05-04 15:26:06.045988965 +0800 CST m=+5.147612106 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:06.052 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:06.045 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:06.045 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fbbb740005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:7558, start at 2024-05-04 15:26:06.045988965 +0800 CST m=+5.147612106 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:06.052 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:06.045 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:06.045 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fbbc000010 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:7644, start at 2024-05-04 15:26:06.096708096 +0800 CST m=+5.148721993 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:06.103 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:06.080 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:06.080 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_capture/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_capture/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 15:26:07 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> check diff failed 4-th time, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 15:26:05.782 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.819 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.917 +08:00] 
[INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:06.917 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:06.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 15:26:05.782 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.819 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.917 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:05.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:06.917 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 15:26:06.925 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]") table test.lz4_finish_mark not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9077.out cli tso query --pd=http://127.0.0.1:2379 table ddl_manager.finish_mark not exists for 13-th check, retry later check diff failed 5-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.lz4_finish_mark not exists for 2-th check, retry later + set +x + tso='449526515510804481 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526515510804481 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x table ddl_manager.finish_mark not exists for 14-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 26.72 secs (139465321 bytes/sec) [Pipeline] { [Pipeline] cache Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ***************** properties ***************** "updateproportion"="0" "threadcount"="2" "insertproportion"="0" "readallfields"="true" "workload"="core" "mysql.db"="multi_capture_1" "mysql.port"="4000" "mysql.host"="127.0.0.1" "dotransactions"="false" "mysql.user"="root" "operationcount"="0" "requestdistribution"="uniform" "readproportion"="0" "recordcount"="10" "scanproportion"="0" ********************************************** Run finished, takes 9.227654ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2052.9, Avg(us): 1766, Min(us): 1066, Max(us): 4311, 95th(us): 5000, 99th(us): 5000 check diff successfully Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 15-th check, retry later ***************** properties ***************** "scanproportion"="0" "mysql.port"="4000" "operationcount"="0" "requestdistribution"="uniform" "workload"="core" "insertproportion"="0" "recordcount"="10" "mysql.db"="multi_capture_2" "updateproportion"="0" "readallfields"="true" "readproportion"="0" "mysql.user"="root" "threadcount"="2" "mysql.host"="127.0.0.1" "dotransactions"="false" ********************************************** Run finished, takes 8.553537ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2175.3, Avg(us): 1637, Min(us): 877, Max(us): 3874, 95th(us): 4000, 99th(us): 4000 table test.lz4_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7933.out cli changefeed pause -c lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ***************** properties ***************** "mysql.user"="root" "updateproportion"="0" "mysql.port"="4000" "threadcount"="2" "mysql.host"="127.0.0.1" "dotransactions"="false" "readallfields"="true" "insertproportion"="0" "scanproportion"="0" "workload"="core" "requestdistribution"="uniform" "mysql.db"="multi_capture_3" "readproportion"="0" "recordcount"="10" "operationcount"="0" ********************************************** Run finished, takes 9.634768ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1701.4, Avg(us): 1840, Min(us): 1005, Max(us): 3668, 95th(us): 4000, 99th(us): 4000 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 16-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7968.out cli changefeed remove -c lz4 ***************** properties ***************** "operationcount"="0" "scanproportion"="0" "workload"="core" "updateproportion"="0" "threadcount"="2" "mysql.db"="multi_capture_4" "requestdistribution"="uniform" "readproportion"="0" "insertproportion"="0" "mysql.host"="127.0.0.1" "dotransactions"="false" "readallfields"="true" "mysql.user"="root" "recordcount"="10" "mysql.port"="4000" ********************************************** Run finished, takes 9.428264ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1948.1, Avg(us): 1783, Min(us): 943, Max(us): 4185, 95th(us): 5000, 99th(us): 5000 [Sat May 4 15:26:15 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92149216.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Changefeed remove successfully. ID: lz4 CheckpointTs: 449526515049955356 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 17-th check, retry later + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8005.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc68d00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:7022, start at 2024-05-04 15:26:17.165575456 +0800 CST m=+5.183665710 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:17.171 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:17.140 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:17.140 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:26:18 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/default/upstream/7365042379664508575 
{"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:26:18 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92779279.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later table ddl_manager.finish_mark not exists for 18-th check, retry later + set +x + tso='449526517658025986 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526517658025986 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8038.out cli changefeed create --start-ts=449526517658025986 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd Create changefeed successfully! 
ID: zstd Info: {"upstream_id":7365042221777659985,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-05-04T15:26:19.616890872+08:00","start_ts":449526517658025986,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526517658025986,"checkpoint_ts":449526517658025986,"checkpoint_time":"2024-05-04 15:26:17.736"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 2-th time, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 6.73 secs (553624018 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc68d00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:7022, start at 2024-05-04 15:26:17.165575456 +0800 CST m=+5.183665710 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:17.171 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:17.140 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:17.140 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc697c000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:7109, start at 2024-05-04 15:26:17.194201255 +0800 CST m=+5.155575421 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:17.199 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:17.183 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:17.183 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
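Each compression algorithm in the kafka_compression case gets its own short-lived changefeed, as in the lz4 pause/remove and the zstd create traced above; a hedged sketch of that create/verify/tear-down cycle, with the workload and diff check elided:
# Hedged sketch of the per-algorithm changefeed lifecycle; the loop is illustrative,
# the sink URI parameters mirror the commands traced above.
pd=http://127.0.0.1:2379
for algo in lz4 zstd; do
  start_ts=$(cdc cli tso query --pd="${pd}" | head -n1 | awk '{print $1}')
  sink="kafka://127.0.0.1:9092/ticdc-kafka-compression-${algo}-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=${algo}"
  cdc cli changefeed create -c "${algo}" --start-ts="${start_ts}" --sink-uri="${sink}"
  # ... run the workload, wait for the finish mark table, check diff ...
  cdc cli changefeed pause  -c "${algo}"
  cdc cli changefeed remove -c "${algo}"
done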
Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc92780003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:9231, start at 2024-05-04 15:26:19.808025655 +0800 CST m=+5.357017036 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:19.815 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:19.806 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:19.806 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 15:26:21 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> table ddl_manager.finish_mark not exists for 19-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:26:21 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 
{"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:26:21 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.93339335.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8303; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 15:26:22 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.85038505.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 3-th time, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 15:26:19.582 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.613 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.717 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.725 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:20.717 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:20.724 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 15:26:19.582 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.613 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.717 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:19.725 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:20.717 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 15:26:20.724 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]") table test.zstd_finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc92780003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:9231, start at 2024-05-04 15:26:19.808025655 +0800 CST m=+5.357017036 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:19.815 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:19.806 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:19.806 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09fc93240014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j1vs1-n66zh, pid:9320, start at 2024-05-04 15:26:19.885308823 +0800 CST m=+5.375247636 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:28:19.892 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:26:19.849 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:16:19.849 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 20-th check, retry later Aborted by Jenkins Admin check diff failed 4-th time, retry later table test.zstd_finish_mark not exists for 2-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10643.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 
http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8303 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:26:24 GMT < Content-Length: 1750 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36f32d26-66e6-431c-a8e8-fbed02eacbe0 {"id":"36f32d26-66e6-431c-a8e8-fbed02eacbe0","address":"127.0.0.1:8303","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807581} /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba9e 36f32d26-66e6-431c-a8e8-fbed02eacbe0 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36f32d26-66e6-431c-a8e8-fbed02eacbe0 {"id":"36f32d26-66e6-431c-a8e8-fbed02eacbe0","address":"127.0.0.1:8303","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807581} /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba9e 36f32d26-66e6-431c-a8e8-fbed02eacbe0 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/36f32d26-66e6-431c-a8e8-fbed02eacbe0 {"id":"36f32d26-66e6-431c-a8e8-fbed02eacbe0","address":"127.0.0.1:8303","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807581} /tidb/cdc/default/__cdc_meta__/capture/52c9c54c-cc9d-457e-934c-0f003196d860 {"id":"52c9c54c-cc9d-457e-934c-0f003196d860","address":"127.0.0.1:8302","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807578} /tidb/cdc/default/__cdc_meta__/capture/a177d637-1d36-4025-aab8-f427eaef6187 {"id":"a177d637-1d36-4025-aab8-f427eaef6187","address":"127.0.0.1:8301","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807575} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba49 a177d637-1d36-4025-aab8-f427eaef6187 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba7e 52c9c54c-cc9d-457e-934c-0f003196d860 /tidb/cdc/default/__cdc_meta__/owner/22318f427ec8ba9e 36f32d26-66e6-431c-a8e8-fbed02eacbe0 /tidb/cdc/default/default/upstream/7365042379664508575 {"id":7365042379664508575,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9391.out cli changefeed create --start-ts=449526515510804481 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-capture-test-29707?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8301 table ddl_manager.finish_mark not exists for 21-th check, retry later Create changefeed successfully! 
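The `[[: ... syntax error: operand expected` reported for kafka_compression/run.sh a few lines above looks like multi-line grep output being dropped straight into an arithmetic test; a hedged sketch of a more robust check (the log path and threshold are illustrative, not the script's actual values):
# Hedged sketch: count matches with grep -c so the test always compares a number.
compression_uses=$(grep -c 'Kafka producer uses zstd compression algorithm' \
  /tmp/tidb_cdc_test/kafka_compression/cdc.log 2>/dev/null || true)
if [[ "${compression_uses:-0}" -ge 1 ]]; then
  echo "zstd compression confirmed (${compression_uses} occurrences)"
else
  echo "zstd compression not observed" >&2
fi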
ID: d3778bff-3da7-44b8-9fbc-12eeb032a936 Info: {"upstream_id":7365042379664508575,"namespace":"default","id":"d3778bff-3da7-44b8-9fbc-12eeb032a936","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-capture-test-29707?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T15:26:24.962896953+08:00","start_ts":449526515510804481,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526515510804481,"checkpoint_ts":449526515510804481,"checkpoint_time":"2024-05-04 15:26:09.545"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:26:25 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 {"id":"3b6de0f7-f4a2-4817-a7d1-e5b98b82e596","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef60dcc 3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 /tidb/cdc/default/default/upstream/7365042425104947295 {"id":7365042425104947295,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 {"id":"3b6de0f7-f4a2-4817-a7d1-e5b98b82e596","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef60dcc 3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 /tidb/cdc/default/default/upstream/7365042425104947295 {"id":7365042425104947295,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 {"id":"3b6de0f7-f4a2-4817-a7d1-e5b98b82e596","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807582} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef60dcc 3b6de0f7-f4a2-4817-a7d1-e5b98b82e596 /tidb/cdc/default/default/upstream/7365042425104947295 {"id":7365042425104947295,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:26:25 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>> table test.zstd_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8175.out cli changefeed pause -c zstd + set +x [Sat May 4 15:26:26 CST 2024] <<<<<< START kafka consumer in multi_capture case >>>>>> table multi_capture_1.usertable not exists for 1-th check, retry later PASS + set +x + tso='449526519590027265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526519590027265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 15:26:26 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1067710679.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 5-th time, retry later coverage: 2.0% of statements in github.com/pingcap/tiflow/... table processor_stop_delay.t not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 22-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8212.out cli changefeed remove -c zstd table multi_capture_1.usertable not exists for 2-th check, retry later check diff successfully table processor_stop_delay.t exists check diff failed 1-th time, retry later Changefeed remove successfully. ID: zstd CheckpointTs: 449526518667280405 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 23-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:26:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d6de188-3cb2-4af1-9d32-b9c5c43e8865 {"id":"3d6de188-3cb2-4af1-9d32-b9c5c43e8865","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef7ddd4 3d6de188-3cb2-4af1-9d32-b9c5c43e8865 /tidb/cdc/default/default/upstream/7365042438636759252 {"id":7365042438636759252,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d6de188-3cb2-4af1-9d32-b9c5c43e8865 {"id":"3d6de188-3cb2-4af1-9d32-b9c5c43e8865","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef7ddd4 3d6de188-3cb2-4af1-9d32-b9c5c43e8865 /tidb/cdc/default/default/upstream/7365042438636759252 {"id":7365042438636759252,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d6de188-3cb2-4af1-9d32-b9c5c43e8865 {"id":"3d6de188-3cb2-4af1-9d32-b9c5c43e8865","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807586} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427ef7ddd4 3d6de188-3cb2-4af1-9d32-b9c5c43e8865 /tidb/cdc/default/default/upstream/7365042438636759252 {"id":7365042438636759252,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10739.out cli changefeed create --start-ts=449526519590027265 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml + set +x Create changefeed successfully! 
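The multi_topics_v2 changefeed created above passes --config .../conf/changefeed.toml, and its Info output below shows a dispatchers rule routing test.* tables to per-table topics; a hedged reconstruction of what such a config could look like (the actual file contents are not shown in the log, and start_ts is assumed to come from a prior tso query):
# Hedged reconstruction of a topic-dispatch rule matching the "dispatchers" field
# in the changefeed Info below; the real conf/changefeed.toml is not shown here.
cat > changefeed.toml <<'EOF'
[sink]
dispatchers = [
    { matcher = ['test.*'], topic = "{schema}_{table}" },
]
EOF
cdc cli changefeed create --start-ts="${start_ts}" \
  --sink-uri='kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' \
  --config changefeed.toml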
ID: 7c2cf767-437d-4bc5-b00f-49b9cd62b99e Info: {"upstream_id":7365042438636759252,"namespace":"default","id":"7c2cf767-437d-4bc5-b00f-49b9cd62b99e","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-04T15:26:30.127518865+08:00","start_ts":449526519590027265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-71-g41fc0de5f","resolved_ts":449526519590027265,"checkpoint_ts":449526519590027265,"checkpoint_time":"2024-05-04 15:26:25.106"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 7.97 secs (467685676 bytes/sec) [Pipeline] { [Pipeline] cache wait process cdc.test exit for 1-th time... table multi_capture_1.usertable exists table multi_capture_2.usertable exists table multi_capture_3.usertable not exists for 1-th check, retry later wait process cdc.test exit for 2-th time... check diff successfully table ddl_manager.finish_mark not exists for 24-th check, retry later + set +x cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:26:31 CST 2024] <<<<<< run test case kafka_compression success! 
>>>>>> check diff failed 1-th time, retry later table multi_capture_3.usertable exists table multi_capture_4.usertable not exists for 1-th check, retry later check diff failed 1-th time, retry later table ddl_manager.finish_mark not exists for 25-th check, retry later table multi_capture_4.usertable exists check diff failed 1-th time, retry later check diff failed 2-th time, retry later table ddl_manager.finish_mark not exists for 26-th check, retry later check diff failed 2-th time, retry later table ddl_manager.finish_mark not exists for 27-th check, retry later check diff failed 3-th time, retry later check diff successfully ***************** properties ***************** "threadcount"="2" "operationcount"="0" "updateproportion"="0" "requestdistribution"="uniform" "readproportion"="0" "mysql.port"="4000" "scanproportion"="0" "mysql.db"="multi_capture_1" "recordcount"="20" "workload"="core" "readallfields"="true" "insertproportion"="0" "mysql.host"="127.0.0.1" "mysql.user"="root" "dotransactions"="false" ********************************************** Run finished, takes 11.323955ms INSERT - Takes(s): 0.0, Count: 20, OPS: 1989.5, Avg(us): 988, Min(us): 542, Max(us): 1787, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "readproportion"="0" "scanproportion"="0" "recordcount"="20" "insertproportion"="0" "readallfields"="true" "operationcount"="0" "mysql.db"="multi_capture_2" "threadcount"="2" "mysql.port"="4000" "mysql.host"="127.0.0.1" "mysql.user"="root" "updateproportion"="0" "workload"="core" "requestdistribution"="uniform" "dotransactions"="false" ********************************************** Run finished, takes 10.537879ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2162.6, Avg(us): 1005, Min(us): 503, Max(us): 1572, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "dotransactions"="false" "threadcount"="2" "readallfields"="true" "mysql.host"="127.0.0.1" "mysql.port"="4000" "readproportion"="0" "insertproportion"="0" "updateproportion"="0" "workload"="core" "mysql.db"="multi_capture_3" "operationcount"="0" "mysql.user"="root" "requestdistribution"="uniform" "scanproportion"="0" "recordcount"="20" ********************************************** Run finished, takes 10.42931ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2181.1, Avg(us): 972, Min(us): 509, Max(us): 1587, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "requestdistribution"="uniform" "mysql.port"="4000" "mysql.user"="root" "workload"="core" "readproportion"="0" "dotransactions"="false" "operationcount"="0" "recordcount"="20" "threadcount"="2" "mysql.host"="127.0.0.1" "updateproportion"="0" "mysql.db"="multi_capture_4" "readallfields"="true" "insertproportion"="0" "scanproportion"="0" ********************************************** Run finished, takes 12.102407ms INSERT - Takes(s): 0.0, Count: 20, OPS: 1862.0, Avg(us): 1146, Min(us): 548, Max(us): 1966, 95th(us): 2000, 99th(us): 2000 check diff failed 1-th time, retry later check diff failed 3-th time, retry later Click here to forcibly terminate running steps check diff failed 4-th time, retry later check diff successfully check diff failed 4-th time, retry later table ddl_manager.finish_mark not exists for 28-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... check diff successfully cdc.test: no process found wait process cdc.test exit for 4-th time... 
process cdc.test already exit [Sat May 4 15:26:41 CST 2024] <<<<<< run test case multi_capture success! >>>>>> table ddl_manager.finish_mark not exists for 29-th check, retry later check diff failed 5-th time, retry later Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1828-w3vlr-3jkkt table ddl_manager.finish_mark not exists for 30-th check, retry later check diff failed 6-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:26:43 CST 2024] <<<<<< run test case kafka_messages success! >>>>>> check diff failed 1-th time, retry later table ddl_manager.finish_mark not exists for 31-th check, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc) 3727142400 bytes in 14.17 secs (263109742 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 15:26:46 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>> [Pipeline] { [Pipeline] container [Pipeline] { table ddl_manager.finish_mark not exists for 32-th check, retry later [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh ERROR: Failed to launch pingcap-tiflow-pull-cdc-integration-kafka-test-1828-w3vlr-26nhw java.lang.IllegalStateException: Node was deleted, computer is null at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:203) at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297) at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46) at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:829) + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh check diff failed 2-th time, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh table ddl_manager.finish_mark not exists for 33-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] archiveArtifacts Archiving artifacts check diff failed 3-th time, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Recording fingerprints table ddl_manager.finish_mark not exists for 34-th check, retry later check diff failed 4-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 35-th check, retry later check diff failed 5-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 36-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
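[editor's note] The repeated "+ echo Waiting for zookeeper to be ready..." xtrace blocks above are the readiness gate each parallel test pod runs before starting its group: check the ZooKeeper and Kafka ports, then confirm broker 1 has registered. Reconstructed from the trace (a sketch; the real step is defined in the pipeline and may wrap these checks in a retry loop), it is roughly:

#!/usr/bin/env bash
# Readiness gate for the sidecar Kafka stack, as traced above.
echo "Waiting for zookeeper to be ready..."
nc -z localhost 2181
echo "Waiting for kafka to be ready..."
nc -z localhost 9092
echo "Waiting for kafka-broker to be ready..."
# ZooKeeper's four-letter "dump" command lists ephemeral nodes; /brokers/ids/1
# appears only once broker 1 has actually registered, not merely opened its port.
echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1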
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh using Sink-Type: kafka... <<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo The 1 times to try to start tidb cluster... table ddl_manager.finish_mark not exists for 37-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ff0b100009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:9326, start at 2024-05-04 15:27:00.302038144 +0800 CST m=+5.270379842 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:00.308 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:00.292 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:00.292 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 38-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/partition_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ff0b100009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:9326, start at 2024-05-04 15:27:00.302038144 +0800 CST m=+5.270379842 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:00.308 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-15:27:00.292 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:00.292 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ff0a780018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-rwb1z-5xmkw, pid:9415, start at 2024-05-04 15:27:00.292743528 +0800 CST m=+5.208122006 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:00.299 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:00.303 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:00.303 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
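[editor's note] The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tikv_gc_leader_uuid, tikv_gc_safe_point, and so on) are the rows of TiDB's mysql.tidb bookkeeping table. The "Verifying Upstream/Downstream TiDB is started..." step keeps retrying while the ERROR 2003 connection failures are printed and passes once a query of that table goes through; a sketch, assuming the helper simply selects the table:

# Succeeds only once TiDB on port 4000 is serving; prints the bootstrap/GC rows seen above.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'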
Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 39-th check, retry later check diff failed 3-th time, retry later table ddl_manager.finish_mark not exists for 40-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... [Sat May 4 15:27:05 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1087210874.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 4-th time, retry later table ddl_manager.finish_mark not exists for 41-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 07:27:08 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81c58aec-6d81-4046-8ff4-df6fe80a53c2 {"id":"81c58aec-6d81-4046-8ff4-df6fe80a53c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427f99c7ca 81c58aec-6d81-4046-8ff4-df6fe80a53c2 /tidb/cdc/default/default/upstream/7365042610818797934 {"id":7365042610818797934,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81c58aec-6d81-4046-8ff4-df6fe80a53c2 {"id":"81c58aec-6d81-4046-8ff4-df6fe80a53c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427f99c7ca 81c58aec-6d81-4046-8ff4-df6fe80a53c2 /tidb/cdc/default/default/upstream/7365042610818797934 {"id":7365042610818797934,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/81c58aec-6d81-4046-8ff4-df6fe80a53c2 
{"id":"81c58aec-6d81-4046-8ff4-df6fe80a53c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-71-g41fc0de5f","git-hash":"41fc0de5f6af46f8da184375c7b63207ef1c184b","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714807625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f427f99c7ca 81c58aec-6d81-4046-8ff4-df6fe80a53c2 /tidb/cdc/default/default/upstream/7365042610818797934 {"id":7365042610818797934,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 15:27:08 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully check_changefeed_status 127.0.0.1:8300 88702d0b-2ed4-4e6b-9cc3-c736895441d7 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=88702d0b-2ed4-4e6b-9cc3-c736895441d7 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/88702d0b-2ed4-4e6b-9cc3-c736895441d7/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 791 0 --:--:-- --:--:-- --:--:-- 796 + info='{"state":"normal","resolved_ts":449526530963668996,"checkpoint_ts":449526530963668996}' + echo '{"state":"normal","resolved_ts":449526530963668996,"checkpoint_ts":449526530963668996}' {"state":"normal","resolved_ts":449526530963668996,"checkpoint_ts":449526530963668996} ++ echo '{"state":"normal","resolved_ts":449526530963668996,"checkpoint_ts":449526530963668996}' ++ jq -r .state table ddl_manager.finish_mark not exists for 42-th check, retry later + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_status 127.0.0.1:8300 88702d0b-2ed4-4e6b-9cc3-c736895441d7 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=88702d0b-2ed4-4e6b-9cc3-c736895441d7 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/88702d0b-2ed4-4e6b-9cc3-c736895441d7/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ffa9a80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:10106, start at 2024-05-04 15:27:10.46416684 +0800 CST m=+5.180707764 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:10.471 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:10.442 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:10.442 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ffa9a80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:10106, start at 2024-05-04 15:27:10.46416684 +0800 CST m=+5.180707764 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:10.471 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:10.442 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:10.442 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ffaa5c0003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-62dq1-4td68, pid:10196, start at 2024-05-04 15:27:10.48905361 +0800 CST m=+5.150332994 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:10.495 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:10.487 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:10.487 +0800 All versions after safe point can be accessed. (DO NOT EDIT) 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 785 0 --:--:-- --:--:-- --:--:-- 788 + info='{"state":"normal","resolved_ts":449526531042312217,"checkpoint_ts":449526531042312217}' + echo '{"state":"normal","resolved_ts":449526531042312217,"checkpoint_ts":449526531042312217}' {"state":"normal","resolved_ts":449526531042312217,"checkpoint_ts":449526531042312217} ++ echo '{"state":"normal","resolved_ts":449526531042312217,"checkpoint_ts":449526531042312217}' ++ jq -r .state + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 2-th time, retry later Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 43-th check, retry later Killing processes kill finished with exit code 2 Killing processes ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) kill finished with exit code 2 Killing processes kill finished with exit code 2 Killing processes kill finished with exit code 2 Killing processes check diff failed 1-th time, retry later kill finished with exit code 2 Killing processes + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11654.out cli tso query --pd=http://127.0.0.1:2379 kill finished with exit code 2 Killing processes table ddl_manager.finish_mark not exists for 44-th check, retry later kill finished with exit code 2 Sending interrupt signal to process Killing processes kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d09ffdb400017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1828-hlzqq-jkp5b, pid:10837, start at 2024-05-04 15:27:13.665170194 +0800 CST m=+5.264024393 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-15:29:13.674 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-15:27:13.666 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-15:17:13.666 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) kill finished with exit code 0 Sending interrupt signal to process Killing processes ++ stop_tidb_cluster kill finished with exit code 0 Killing processes script returned exit code 143 kill finished with exit code 2 Sending interrupt signal to process Killing processes kill finished with exit code 0 Sending interrupt signal to process Killing processes + set +x + tso='449526532379246593 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449526532379246593 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 15:27:15 CST 2024] <<<<<< START cdc server in partition_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1170411706.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 kill finished with exit code 0 Sending interrupt signal to process Killing processes check_changefeed_status 127.0.0.1:8300 88702d0b-2ed4-4e6b-9cc3-c736895441d7 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=88702d0b-2ed4-4e6b-9cc3-c736895441d7 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/88702d0b-2ed4-4e6b-9cc3-c736895441d7/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2206 0 --:--:-- --:--:-- --:--:-- 2218 + info='{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .last_warning.message + error_msg='kafka sink injected error' + [[ ! 
kafka sink injected error =~ kafka ]] run task successfully table ddl_manager.finish_mark not exists for 45-th check, retry later script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714807635.922822,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0023b8540/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes check_changefeed_status 127.0.0.1:8300 88702d0b-2ed4-4e6b-9cc3-c736895441d7 normal + endpoint=127.0.0.1:8300 + changefeed_id=88702d0b-2ed4-4e6b-9cc3-c736895441d7 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/88702d0b-2ed4-4e6b-9cc3-c736895441d7/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 1839 0 --:--:-- --:--:-- --:--:-- 1848 + info='{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449526532693557252,"checkpoint_ts":449526531068264474,"last_warning":{"time":"2024-05-04T15:27:12.778398001+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later kill finished with exit code 0 {"level":"warn","ts":1714807636.4724903,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002438380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 script returned exit code 143 [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } script returned exit code 143 [Pipeline] // node [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout {"level":"warn","ts":1714807635.6500142,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001fd4700/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} {"level":"warn","ts":1714807637.6499808,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001fd4700/127.0.0.1:2379","attempt":1,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} script returned exit code 143 [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // podTemplate [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G11 Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=84cfff41-9e19-42f4-b9d1-98a434f49b3c BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-kc51m GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-kc51m pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-kc51m-zptlq GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... <<================= [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=3eee2fea-382c-40a3-a4d4-6a15c866d68f BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-3vxcl GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-3vxcl pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-3vxcl-phpbf GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... 
<<================= [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G14 Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=60f2011d-e801-4c5b-a5e8-584cde5b3b70 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7vp46 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7vp46 pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7vp46-wk2j4 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=aa12855f-2e17-4c2a-9314-ee38647254de BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7tmjt GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b pingcap_tiflow_pull_cdc_integration_kafka_test_1828-7tmjt GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-7tmjt-f026b GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
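The "./tests/integration_tests/run_group.sh kafka G13" invocation and the "Run cases: tiflash region_merge common_1" line above show the shape of the group runner: it receives a sink type and a group name, resolves the group to a list of cases, and runs each case's run.sh. The contents of run_group.sh itself are not part of this log, so the sketch below is only illustrative; the group_cases mapping and every name in it are assumptions, not taken from the real script.

#!/usr/bin/env bash
# Illustrative sketch only -- not the real tests/integration_tests/run_group.sh,
# whose contents do not appear in this log. It mimics the observable behaviour:
# "run_group.sh <sink-type> <group>" resolves a group name to a list of cases
# and runs each case's run.sh with that sink type.
set -eu

sink_type=$1   # e.g. kafka
group=$2       # e.g. G13

# Hypothetical mapping; the real grouping lives inside run_group.sh.
declare -A group_cases=(
    [G13]="tiflash region_merge common_1"
    [G14]="changefeed_finish force_replicate_table"
)

cases=${group_cases[$group]:?unknown group}
echo "Run cases: $cases"

for case_name in $cases; do
    script="$(dirname "$0")/$case_name/run.sh"
    echo "=================>> Running test $script using Sink-Type: $sink_type... <<================="
    bash "$script" "$sink_type"
done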
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G06 Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=8ff450e9-a007-44c0-bea5-a982c2ed6af6 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-mmjq9 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz pingcap_tiflow_pull_cdc_integration_kafka_test_1828-mmjq9 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-mmjq9-38gpz GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<================= [Pipeline] // cache [Pipeline] } [Pipeline] // withEnv [Pipeline] // dir The 1 times to try to start tidb cluster... + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=b3f6b9e2-e97b-48e5-a580-69d9faa0e716 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-39dcv GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-39dcv pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-39dcv-ksc05 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<================= [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // container [Sat May 4 15:27:25 CST 2024] <<<<<< skip test case clustered_index for kafka! 
>>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:27:25 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! >>>>>> [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] sh [Pipeline] sh [Pipeline] // timeout [Pipeline] } [Pipeline] // cache [Pipeline] } + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=4d62ea92-1648-4c5c-a041-5afde735516b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin 
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j56gh GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt pingcap_tiflow_pull_cdc_integration_kafka_test_1828-j56gh GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-j56gh-stdlt GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<================= [Sat May 4 15:27:26 CST 2024] <<<<<< run test case owner_resign success! 
>>>>>> [Pipeline] // dir [Pipeline] } [Pipeline] // stage [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G10' [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G15 [Pipeline] // stage [Pipeline] } [Pipeline] // dir [Pipeline] } Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=b2dca6a5-5209-4287-bddb-a889752d92c6 JENKINS_NODE_COOKIE=fd05b88e-270e-48e1-9e52-4bb666d08f9d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786654730638856195","prowjobid":"b2dca6a5-5209-4287-bddb-a889752d92c6","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"41fc0de5f6af46f8da184375c7b63207ef1c184b","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/41fc0de5f6af46f8da184375c7b63207ef1c184b","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1828 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786654730638856195 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1828/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1828-88ts5 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs pingcap_tiflow_pull_cdc_integration_kafka_test_1828-88ts5 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1828-88ts5-58zqs GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1828 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
[Pipeline] // withCredentials
[Pipeline] }
start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
Sending interrupt signal to process
Killing processes
start tidb cluster in /tmp/tidb_cdc_test/tiflash
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Starting Downstream PD...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
The 1 times to try to start tidb cluster...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time: 2024-04-30 08:09:12
Verifying upstream PD is started...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
The 1 times to try to start tidb cluster...
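The "start tidb cluster in /tmp/tidb_cdc_test/<case>" and "The N times to try to start tidb cluster..." lines above come from the per-case harness bringing up upstream and downstream clusters before the test body runs. The helper scripts live under tests/integration_tests/_utils (which the PATH entries in the env dumps point at), but their contents are not in this log, so the following is only a sketch of a retry wrapper of that shape; the start_tidb_cluster name, its flag, and the retry limit are assumptions.

#!/usr/bin/env bash
# Illustrative sketch only: a retry wrapper matching the shape of the
# "The N times to try to start tidb cluster..." messages above. The real helper
# (assumed here to be start_tidb_cluster from tests/integration_tests/_utils)
# is not shown anywhere in this build log.
set -eu

WORK_DIR=${1:-/tmp/tidb_cdc_test/example_case}   # hypothetical case work dir
MAX_TRIES=3                                      # assumed limit, not from the log

for i in $(seq 1 "$MAX_TRIES"); do
    echo "The $i times to try to start tidb cluster..."
    if start_tidb_cluster --workdir "$WORK_DIR"; then
        break
    fi
    if [ "$i" -eq "$MAX_TRIES" ]; then
        echo "failed to start tidb cluster after $MAX_TRIES tries" >&2
        exit 1
    fi
done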
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
<<< Run all test success >>>
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
Verifying downstream PD is started...
script returned exit code 143
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<=================
[Sat May 4 15:27:29 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! >>>>>>
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
kill finished with exit code 0
Killing processes
kill finished with exit code 1
[Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout
script returned exit code 143
[Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1828/tiflow-cdc already exists)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // cache [Pipeline] } [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline]
// withCredentials [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G01' [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G00' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G02' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G03' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G05' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G08' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G09' [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G07' [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G04' [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // 
withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G06' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G11' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G12' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G13' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G14' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G15' [Pipeline] } [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G16' [Pipeline] // parallel [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] End of Pipeline org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 5464b20c-fe67-4b1e-a700-49fa38ca6f0e Failed in branch Matrix - TEST_GROUP = 'G10' org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 script returned exit code 1 org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 75fbe1a0-abc4-4a04-a5c8-23b09de76934 Finished: ABORTED
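The build ends with Finished: ABORTED, and most of the "Failed in branch Matrix - TEST_GROUP = 'Gxx'" entries trace back to "script returned exit code 143" rather than to a test assertion failing: 143 is 128 + 15, the status a shell reports for a process killed by SIGTERM, which is consistent with the "Sending interrupt signal to process" lines earlier in the log when the run was cancelled. A minimal stand-alone check, not part of the pipeline, that reproduces that status:

# Not from the pipeline: a minimal demonstration that a SIGTERM-terminated
# command reports status 143 (128 + signal number 15), matching the
# "script returned exit code 143" entries scattered through this log.
sleep 60 &
pid=$!
kill -TERM "$pid"        # the interrupt the agent delivers on abort
wait "$pid"              # wait reports the background job's exit status
status=$?
echo "exit status: $status"   # prints 143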