Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0hctk Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0hctk’ is offline Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-bd39h Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0k531 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 ERROR: Failed to launch pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0hctk io.fabric8.kubernetes.client.KubernetesClientTimeoutException: Timed out waiting for [1000000] milliseconds for [Pod] with name:[pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0hctk] in namespace [jenkins-tiflow]. at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilCondition(BaseOperation.java:939) at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:921) at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:97) at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:185) at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297) at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46) at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:829) Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-q9n5k-blrhq --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1c023037a9e2e6893b4b3e642cc9a5b580234afb" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-q9n5k" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: 
"hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" 
resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 > git rev-list --no-walk cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 5 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=22a1c984-638d-4976-baa3-1f2bf192afdf BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-q9n5k GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305 NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 pingcap_tiflow_pull_cdc_integration_kafka_test_1735-q9n5k GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go 
env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build665224871=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-6bs41 bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 19833 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: 765d38a675d66b0a (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 17 IN A 20.205.243.166 ;; Query time: 1 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Fri Apr 26 11:18:50 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10904/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10904/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. 
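Editor's note: the "Check diff files" stage above pages through the GitHub "list pull request files" API for PR #10904 (page=1 returned HTTP 200 above; the page=2 request follows below) to decide whether any changed file matches the integration-test triggers. A minimal sketch of the same paged query from a shell, assuming a GitHub token in $token (names outside the log are illustrative only):
# Hedged sketch, not part of the pipeline: list the changed files of the PR.
curl -sf -H "Authorization: token $token" \
  "https://api.github.com/repos/pingcap/tiflow/pulls/10904/files?page=1&per_page=100" \
  | jq -r '.[].filename'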
HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10904/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10904/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/redo/reader/reader.go, pkg/cmd/redo/apply.go, pkg/util/memory.go] [Pipeline] echo diff file not matched: cdc/redo/reader/reader.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-d0329d7-16f5d59) 196999168 bytes in 1.02 secs (192862143 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/ .git HEAD is now at 16f5d59f9 fix lint POST git-upload-pack (656 bytes) From https://github.com/pingcap/tiflow = [up to date] master -> origin/master = [up to date] refs/pull/10904/head -> origin/pr/10904/head Previous HEAD position was 16f5d59f9 fix lint HEAD is now at d0329d7f1 ddl_puller (ticdc): handle dorp pk/uk ddl correctly (#10965) 🚧 Checkouting to base SHA:d0329d7f1ca9a1d0de81a565051a09fe7e9231bd... HEAD is now at d0329d7f1 ddl_puller (ticdc): handle dorp pk/uk ddl correctly (#10965) ✅ Checked. 🎉 🧾 HEAD info: d0329d7f1ca9a1d0de81a565051a09fe7e9231bd d0329d7f1 ddl_puller (ticdc): handle dorp pk/uk ddl correctly (#10965) 4fd08424c simple (ticdc): reduce map access and memory allocation to save some cpu (#10942) 830e1622c Makefile: bump version to v8.2.0-master (#10893) 🚧 Pre-merge heads of pull requests to base SHA: d0329d7f1ca9a1d0de81a565051a09fe7e9231bd ... Updating d0329d7f1..16f5d59f9 Fast-forward cdc/redo/reader/reader.go | 7 +------ pkg/cmd/redo/apply.go | 38 +++++++++++++++++++++++++++++++++++++- pkg/util/memory.go | 28 ++++++++++++++++++---------- 3 files changed, 56 insertions(+), 17 deletions(-) 🧾 Pre-merged result: 16f5d59f936001f6d7031387873b3c668f3c5ae6 16f5d59f9 fix lint 95337060d fix lint a2e0ac5a2 remove wwait ✅ Pre merged 🎉 ✅ ~~~~~All done.~~~~~~ [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache not saved (git/pingcap/tiflow/rev-d0329d7-16f5d59 already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh + cd ../tiflow + ./scripts/download-integration-test-binaries.sh master Download binaries... 
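Editor's note: the prepare stage above runs ./scripts/download-integration-test-binaries.sh master; the small curl transfers and wget-style logs that follow are consistent with resolving a commit SHA per component and then fetching its tarball from the fileserver. A hedged sketch of that flow for one component, assuming the fileserver publishes a per-branch sha1 ref (the ref path and extraction target below are assumptions; the builds URL format matches the downloads logged below):
# Hedged sketch: resolve the tip SHA of tidb master, then fetch its server tarball.
sha="$(curl -sfL "${FILE_SERVER_URL}/download/refs/pingcap/tidb/master/sha1")"
wget -nv "${FILE_SERVER_URL}/download/builds/pingcap/tidb/${sha}/centos7/tidb-server.tar.gz" -O tmp/tidb-server.tar.gz
tar -xzf tmp/tidb-server.tar.gz -C bin/   # extraction target is an assumption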
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 769 0 --:--:-- --:--:-- --:--:-- 759 100 41 100 41 0 0 768 0 --:--:-- --:--:-- --:--:-- 759 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 888 0 --:--:-- --:--:-- --:--:-- 891 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 940 0 --:--:-- --:--:-- --:--:-- 953 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 2770 0 --:--:-- --:--:-- --:--:-- 2928 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/944fff519c90039747affb94067439ff9541f2fb/centos7/tidb-server.tar.gz 2024-04-26 19:19:18 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/944fff519c90039747affb94067439ff9541f2fb/centos7/tidb-server.tar.gz [536672508/536672508] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/50c80407cd57e96a85452dd1601fcb41c1f263cf/centos7/pd-server.tar.gz 2024-04-26 19:19:35 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/50c80407cd57e96a85452dd1601fcb41c1f263cf/centos7/pd-server.tar.gz [187359310/187359310] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/2118288ceb3f52a4a8431acba4fc2c560def0732/centos7/tikv-server.tar.gz 2024-04-26 19:20:16 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/2118288ceb3f52a4a8431acba4fc2c560def0732/centos7/tikv-server.tar.gz [918823274/918823274] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/37701038fbd30f5eafac11c3ad180f14c6dcab9e/centos7/tiflash.tar.gz 2024-04-26 19:20:36 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/37701038fbd30f5eafac11c3ad180f14c6dcab9e/centos7/tiflash.tar.gz [456037936/456037936] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-04-26 19:20:41 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-04-26 19:20:44 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-04-26 19:20:44 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-04-26 19:20:45 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-04-26 19:20:49 
URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz [79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-04-26 19:20:59 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x 6 jenkins jenkins 4.0K Apr 26 19:21 . drwxr-sr-x 19 jenkins jenkins 4.0K Apr 26 19:21 .. drwxr-sr-x 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 19:20 go-ycsb -rwxr-xr-x 1 jenkins jenkins 3.8M Apr 26 19:20 jq drwxr-sr-x 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:07 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx 1 jenkins jenkins 16 Apr 26 19:07 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:07 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx 1 jenkins jenkins 15 Apr 26 19:07 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 2.6M Apr 26 18:25 libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 272M Apr 26 19:08 libtiflash_proxy.so -rwxr-xr-x 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x 1 jenkins jenkins 37M Apr 26 10:19 pd-api-bench -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 10:18 pd-ctl -rwxr-xr-x 1 jenkins jenkins 36M Apr 26 10:18 pd-heartbeat-bench -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:18 pd-recover -rwxr-xr-x 1 jenkins jenkins 106M Apr 26 10:18 pd-server -rwxr-xr-x 1 jenkins jenkins 26M Apr 26 10:18 pd-tso-bench -rwxr-xr-x 1 jenkins jenkins 3.0M Apr 26 10:19 pd-ut -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:18 regions-dump drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:19 stores-dump -rwxr-xr-x 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x 1 jenkins jenkins 208M Apr 26 17:54 tidb-server -rwxr-xr-x 1 jenkins jenkins 380M Apr 26 19:07 tiflash -rwxr-xr-x 1 jenkins jenkins 418M Apr 26 06:00 tikv-server -rwxr-xr-x 1 jenkins jenkins 2.0M Apr 26 10:19 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start + cd - 
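Editor's note: `make check_third_party_binary` above prints the resolved path of each required tool under the tiflow workspace. A minimal sketch of an equivalent presence check, assuming the same ./bin layout (the real Makefile target may differ in detail); the output of the `cd -` above resumes below:
# Hedged sketch: fail fast if a required third-party binary is missing or not executable.
for b in tidb-server tikv-server pd-server tiflash pd-ctl sync_diff_inspector go-ycsb etcdctl jq minio; do
  [ -x "bin/$b" ] || { echo "missing or non-executable: bin/$b" >&2; exit 1; }
done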
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x 6 jenkins jenkins 4.0K Apr 26 19:21 . drwxr-sr-x 3 jenkins jenkins 4.0K Apr 26 19:21 .. drwxr-sr-x 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 19:20 go-ycsb -rwxr-xr-x 1 jenkins jenkins 3.8M Apr 26 19:20 jq drwxr-sr-x 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:07 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx 1 jenkins jenkins 16 Apr 26 19:07 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:07 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx 1 jenkins jenkins 15 Apr 26 19:07 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 2.6M Apr 26 18:25 libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 272M Apr 26 19:08 libtiflash_proxy.so -rwxr-xr-x 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x 1 jenkins jenkins 37M Apr 26 10:19 pd-api-bench -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 10:18 pd-ctl -rwxr-xr-x 1 jenkins jenkins 36M Apr 26 10:18 pd-heartbeat-bench -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:18 pd-recover -rwxr-xr-x 1 jenkins jenkins 106M Apr 26 10:18 pd-server -rwxr-xr-x 1 jenkins jenkins 26M Apr 26 10:18 pd-tso-bench -rwxr-xr-x 1 jenkins jenkins 3.0M Apr 26 10:19 pd-ut -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:18 regions-dump drwxr-sr-x 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 10:19 stores-dump -rwxr-xr-x 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x 1 jenkins jenkins 208M Apr 26 17:54 tidb-server -rwxr-xr-x 1 jenkins jenkins 380M Apr 26 19:07 tiflash -rwxr-xr-x 1 jenkins jenkins 418M Apr 26 06:00 tikv-server -rwxr-xr-x 1 jenkins jenkins 2.0M Apr 26 10:19 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp 
jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (binary/pingcap/tiflow/cdc-integration-test/rev-d0329d7-16f5d59) 1191586816 bytes in 3.48 secs (342678684 bytes/sec) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 1.2G drwxr-sr-x 2 jenkins jenkins 4.0K Apr 26 19:21 . drwxr-sr-x 19 jenkins jenkins 4.0K Apr 26 19:21 .. -rwxr-xr-x 1 jenkins jenkins 220M Apr 26 18:12 cdc -rwxr-xr-x 1 jenkins jenkins 359M Apr 26 18:12 cdc.test -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_kafka_consumer -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_pulsar_consumer -rwxr-xr-x 1 jenkins jenkins 182M Apr 26 18:07 cdc_storage_consumer -rwxr-xr-x 1 jenkins jenkins 12M Apr 26 18:08 oauth2-server + '[' -f ./bin/cdc ']' + '[' -f ./bin/cdc_kafka_consumer ']' + '[' -f ./bin/cdc_storage_consumer ']' + '[' -f ./bin/cdc.test ']' + ls -alh ./bin total 1.2G drwxr-sr-x 2 jenkins jenkins 4.0K Apr 26 19:21 . drwxr-sr-x 19 jenkins jenkins 4.0K Apr 26 19:21 .. 
-rwxr-xr-x 1 jenkins jenkins 220M Apr 26 18:12 cdc -rwxr-xr-x 1 jenkins jenkins 359M Apr 26 18:12 cdc.test -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_kafka_consumer -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_pulsar_consumer -rwxr-xr-x 1 jenkins jenkins 182M Apr 26 18:07 cdc_storage_consumer -rwxr-xr-x 1 jenkins jenkins 12M Apr 26 18:08 oauth2-server + ./bin/cdc version Release Version: v8.2.0-alpha-20-g16f5d59f9 Git Commit Hash: 16f5d59f936001f6d7031387873b3c668f3c5ae6 Git Branch: HEAD UTC Build Time: 2024-04-26 10:07:43 Go Version: go version go1.21.0 linux/amd64 Failpoint Build: true [Pipeline] } Cache not saved (binary/pingcap/tiflow/cdc-integration-test/rev-d0329d7-16f5d59 already exists) [Pipeline] // cache [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/ + ls -alh ./bin total 3.0G drwxr-sr-x 6 jenkins jenkins 4.0K Apr 26 19:21 . drwxr-sr-x 19 jenkins jenkins 4.0K Apr 26 19:21 .. 
drwxr-sr-x 2 jenkins jenkins 4.0K Apr 26 19:21 bin -rwxr-xr-x 1 jenkins jenkins 220M Apr 26 18:12 cdc -rwxr-xr-x 1 jenkins jenkins 359M Apr 26 18:12 cdc.test -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_kafka_consumer -rwxr-xr-x 1 jenkins jenkins 183M Apr 26 18:08 cdc_pulsar_consumer -rwxr-xr-x 1 jenkins jenkins 182M Apr 26 18:07 cdc_storage_consumer drwxr-sr-x 4 jenkins jenkins 4.0K Apr 26 19:21 etc -rwxr-xr-x 1 jenkins jenkins 17M Apr 26 19:21 etcdctl -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 19:21 go-ycsb -rwxr-xr-x 1 jenkins jenkins 3.8M Apr 26 19:21 jq drwxr-sr-x 3 jenkins jenkins 4.0K Apr 26 19:21 lib lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:21 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x 1 jenkins jenkins 1016K Apr 26 19:21 libc++.so.1.0 lrwxrwxrwx 1 jenkins jenkins 16 Apr 26 19:21 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x 1 jenkins jenkins 358K Apr 26 19:21 libc++abi.so.1.0 lrwxrwxrwx 1 jenkins jenkins 13 Apr 26 19:21 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx 1 jenkins jenkins 15 Apr 26 19:21 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 2.6M Apr 26 19:21 libgmssl.so.3.0 -rwxr-xr-x 1 jenkins jenkins 272M Apr 26 19:21 libtiflash_proxy.so -rwxr-xr-x 1 jenkins jenkins 50M Apr 26 19:21 minio -rwxr-xr-x 1 jenkins jenkins 12M Apr 26 18:08 oauth2-server -rwxr-xr-x 1 jenkins jenkins 37M Apr 26 19:21 pd-api-bench -rwxr-xr-x 1 jenkins jenkins 44M Apr 26 19:21 pd-ctl -rwxr-xr-x 1 jenkins jenkins 36M Apr 26 19:21 pd-heartbeat-bench -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 19:21 pd-recover -rwxr-xr-x 1 jenkins jenkins 106M Apr 26 19:21 pd-server -rwxr-xr-x 1 jenkins jenkins 26M Apr 26 19:21 pd-tso-bench -rwxr-xr-x 1 jenkins jenkins 3.0M Apr 26 19:21 pd-ut -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 19:21 regions-dump drwxr-sr-x 4 jenkins jenkins 4.0K Apr 26 19:21 share -rwxr-xr-x 1 jenkins jenkins 32M Apr 26 19:21 stores-dump -rwxr-xr-x 1 jenkins jenkins 192M Apr 26 19:21 sync_diff_inspector -rwxr-xr-x 1 jenkins jenkins 208M Apr 26 19:21 tidb-server -rwxr-xr-x 1 jenkins jenkins 380M Apr 26 19:21 tiflash -rwxr-xr-x 1 jenkins jenkins 418M Apr 26 19:21 tikv-server -rwxr-xr-x 1 jenkins jenkins 2.0M Apr 26 19:21 xprog [Pipeline] } ERROR: Failed to launch pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-bd39h io.fabric8.kubernetes.client.KubernetesClientTimeoutException: Timed out waiting for [1000000] milliseconds for [Pod] with name:[pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-bd39h] in namespace [jenkins-tiflow]. 
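Editor's note: this is the second agent pod (bd39h) that never became ready within the launch timeout of [1000000] ms (about 16.7 minutes); the Java stack trace for this failure continues below. A hedged sketch of commands one could run against the cluster to see why the pod stayed unschedulable or not ready (not part of the log; the pod name is taken from the error above):
kubectl -n jenkins-tiflow describe pod pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-bd39h
kubectl -n jenkins-tiflow get events --field-selector involvedObject.name=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-bd39h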
at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilCondition(BaseOperation.java:939)
at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:921)
at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:97)
at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:185)
at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297)
at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46)
at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.lang.Thread.run(Thread.java:829)
Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 69.73 secs (53303547 bytes/sec)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Tests)
[Pipeline] parallel
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G00')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G01')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G02')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G03')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G04')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G05')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G06')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G07')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G08')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G09')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G10')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G11')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G12')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G13')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G14')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G15')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G16')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G17')
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-n4jkm Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git 
https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6hf3q-vsbrf --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "770a5964d5c4cf3897ae66679cb13f146d925913" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6hf3q" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: 
"USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 [Pipeline] node No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7b433555; decorates RemoteLauncher[hudson.remoting.Channel@1f272ba2:JNLP4-connect connection from 10.233.105.162/10.233.105.162:46690] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the 
remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v’ is offline Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x [Pipeline] podTemplate [Pipeline] { [Pipeline] node Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw’ is offline Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx’ is offline Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd [Pipeline] node Avoid second fetch > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56’ is offline Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7lxf9-jb4lt --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: 
"46c9203cc535cdb3219a3b20f3ba9f8f2ecbca4e" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7lxf9" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" 
value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-n4jkm’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd’ is offline Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj’ is offline Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 12.72 secs (292229903 bytes/sec) [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] node [Pipeline] node [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] checkout The recommended git tool is: git + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... 
+ nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // container [Pipeline] sh Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@626415be; decorates RemoteLauncher[hudson.remoting.Channel@6923ba47:JNLP4-connect connection from 10.233.93.148/10.233.93.148:45220] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=cedacb56-abc0-4e52-868d-1bea30f9e07c BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6hf3q GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6hf3q GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
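Each Kubernetes agent above runs one case group of the integration suite: ./tests/integration_tests/run_group.sh kafka G03 resolves the group name to a list of case directories (here row_format, drop_many_tables, processor_stop_delay and partition_table; G08 appears later with its own list) and drives each case's run.sh against the kafka sink. The sketch below is illustrative only; the group-to-cases map and the script layout are assumptions, and the real run_group.sh in the tiflow repository may be organized differently.

#!/usr/bin/env bash
# Sketch of a grouped test runner in the spirit of run_group.sh (assumed layout).
set -eu
sink_type=$1   # e.g. "kafka"
group=$2       # e.g. "G03"

# Hypothetical group -> cases mapping; the two groups visible in this log are shown.
declare -A groups=(
  [G03]="row_format drop_many_tables processor_stop_delay partition_table"
  [G08]="processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo"
)

echo "Run cases: ${groups[$group]}"
for tc in ${groups[$group]}; do
  # Each case ships its own run.sh that takes the sink type as an argument.
  bash "$(dirname "$0")/${tc}/run.sh" "$sink_type"
done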
Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/row_format Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
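The "Verifying Upstream TiDB is started..." step, followed by the repeated ERROR 2003 lines just below, indicates the harness simply retries a connection until the freshly started TiDB accepts it. A minimal sketch of such a wait loop, assuming the mysql client is on PATH and the upstream instance listens on the default test port 4000 (both are assumptions; the real helper lives under tests/integration_tests/_utils):

# Poll TiDB until it answers a trivial query, giving up after ~60 seconds.
for i in $(seq 1 60); do
  if mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; then
    echo "TiDB is up"
    break
  fi
  echo "waiting for TiDB ($i)..."
  sleep 1
done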
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 7.52 secs (494384452 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j’ is offline [Pipeline] } Still waiting to schedule task ‘pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g’ is offline [Pipeline] // timeout [Pipeline] } [Pipeline] // container [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=caee5fdb-65d9-4b68-aac3-3e8227a51987 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7lxf9 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7lxf9 pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689489e4000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:1284, start at 2024-04-26 19:23:12.386502339 +0800 CST m=+5.113827449 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:12.393 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:12.377 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:12.377 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689489e4000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:1284, start at 2024-04-26 19:23:12.386502339 +0800 CST m=+5.113827449 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:12.393 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:12.377 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:12.377 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68948cec0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:1369, start at 2024-04-26 19:23:12.605711619 +0800 CST m=+5.276795169 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:12.612 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:12.571 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:12.571 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
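The flattened VARIABLE_NAME / VARIABLE_VALUE / COMMENT dump above is the contents of TiDB's mysql.tidb bootstrap table: the tikv_gc_* rows record the GC leader, its lease, the GC run interval, the GC life time and the current safe point. The same rows can be read back in readable form with a query like the one below (the host and port 4000 are assumptions matching the upstream test instance):

# Re-read the GC bookkeeping rows that the test prints at startup.
mysql -h 127.0.0.1 -P 4000 -u root \
  -e "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%'"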
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2826.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449349050938687489 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349050938687489 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:23:17 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28642866.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:23:20 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e346e6b3-27c0-4e95-a78d-385ed9e0e785 {"id":"e346e6b3-27c0-4e95-a78d-385ed9e0e785","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130597} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a24f8a8cf e346e6b3-27c0-4e95-a78d-385ed9e0e785 /tidb/cdc/default/default/upstream/7362134802904826999 {"id":7362134802904826999,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e346e6b3-27c0-4e95-a78d-385ed9e0e785 {"id":"e346e6b3-27c0-4e95-a78d-385ed9e0e785","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130597} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a24f8a8cf e346e6b3-27c0-4e95-a78d-385ed9e0e785 /tidb/cdc/default/default/upstream/7362134802904826999 {"id":7362134802904826999,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e346e6b3-27c0-4e95-a78d-385ed9e0e785 {"id":"e346e6b3-27c0-4e95-a78d-385ed9e0e785","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130597} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a24f8a8cf e346e6b3-27c0-4e95-a78d-385ed9e0e785 /tidb/cdc/default/default/upstream/7362134802904826999 {"id":7362134802904826999,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2916.out cli changefeed create --start-ts=449349050938687489 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-17175?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 16c914bc-8ccf-44c1-a303-8f2e7f662316 Info: {"upstream_id":7362134802904826999,"namespace":"default","id":"16c914bc-8ccf-44c1-a303-8f2e7f662316","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-17175?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:23:20.934480741+08:00","start_ts":449349050938687489,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349050938687489,"checkpoint_ts":449349050938687489,"checkpoint_time":"2024-04-26 19:23:15.927"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Fri Apr 26 19:23:22 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
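The curl trace interleaved above (and repeated further down for the processor_err_chan case) is the cdc-server readiness probe: the script polls http://127.0.0.1:8300/debug/info roughly every three seconds, for up to about 50 attempts, until the response contains "etcd info" and no "failed to get info:" marker. Stripped of the set -x noise, the loop amounts to roughly this:

# Condensed form of the readiness loop traced above (same endpoint and markers).
for i in $(seq 0 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info 2>&1 || true)
  if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
    break
  fi
  if [ "$i" -eq 50 ]; then
    echo "cdc server failed to start in time"
    exit 1
  fi
  sleep 3
done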
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2k3wm-m89d3 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "077ac6219c49e0a3086f15f50a802ce373deb9c0" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2k3wm" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: 
"200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) The recommended git tool is: git VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689571880019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:1355, start at 2024-04-26 19:23:27.245357232 +0800 CST m=+5.079655950 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:27.253 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:27.252 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:27.252 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689571880019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:1355, start at 2024-04-26 19:23:27.245357232 +0800 CST m=+5.079655950 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:27.253 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:27.252 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:27.252 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689574680016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:1441, start at 2024-04-26 19:23:27.424420666 +0800 CST m=+5.198435172 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:25:27.431 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:23:27.436 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:13:27.436 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
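The cluster being verified here belongs to the processor_err_chan case, which (as the cdc server launch further down in this log shows) is started with GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)', presumably so that adding a table to the processor errors out once on purpose. A minimal sketch of supplying such a failpoint term, assuming the usual pingcap/failpoint semantics where the leading count limits how many evaluations fire; the flags are taken from the command traced later in this log:

# Illustrative only: arm the failpoint for a single evaluation, then start the
# coverage-instrumented cdc binary roughly the way the test does.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)'
cdc.test server \
  --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log \
  --log-level debug \
  --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data \
  --cluster-id default \
  --addr 127.0.0.1:8300 \
  --pd http://127.0.0.1:2379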
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-3glp8-8zqg5 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "862b474f36f4f51462aacff1fb24596318226ae6" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-3glp8" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true 
volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: 
"********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3dd6c339; decorates RemoteLauncher[hudson.remoting.Channel@5c57671d:JNLP4-connect connection from 10.233.69.106/10.233.69.106:49976] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@689177dd; decorates RemoteLauncher[hudson.remoting.Channel@223aa19:JNLP4-connect connection from 10.233.123.110/10.233.123.110:51302] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 table row_format.finish_mark not exists for 1-th check, retry later > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test 
flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] withCredentials > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache [Fri Apr 26 19:23:33 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29702972.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table row_format.finish_mark not exists for 2-th check, retry later table row_format.finish_mark not exists for 3-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:23:36 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0b2272a5-51c2-4a01-beaa-ad029d04eab7 {"id":"0b2272a5-51c2-4a01-beaa-ad029d04eab7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130613} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2530c065 0b2272a5-51c2-4a01-beaa-ad029d04eab7 /tidb/cdc/default/default/upstream/7362134856972947662 {"id":7362134856972947662,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0b2272a5-51c2-4a01-beaa-ad029d04eab7 {"id":"0b2272a5-51c2-4a01-beaa-ad029d04eab7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130613} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2530c065 0b2272a5-51c2-4a01-beaa-ad029d04eab7 /tidb/cdc/default/default/upstream/7362134856972947662 {"id":7362134856972947662,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0b2272a5-51c2-4a01-beaa-ad029d04eab7 {"id":"0b2272a5-51c2-4a01-beaa-ad029d04eab7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130613} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2530c065 0b2272a5-51c2-4a01-beaa-ad029d04eab7 /tidb/cdc/default/default/upstream/7362134856972947662 {"id":7362134856972947662,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:23:36 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>> check_changefeed_state http://127.0.0.1:2379 33bea42a-b66c-4b0f-ab47-025487fabae3 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=33bea42a-b66c-4b0f-ab47-025487fabae3 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 33bea42a-b66c-4b0f-ab47-025487fabae3 -s + info='{ "upstream_id": 7362134856972947662, "namespace": "default", "id": "33bea42a-b66c-4b0f-ab47-025487fabae3", "state": "normal", "checkpoint_tso": 449349056371884036, "checkpoint_time": "2024-04-26 19:23:36.653", "error": null }' + echo '{ "upstream_id": 
7362134856972947662, "namespace": "default", "id": "33bea42a-b66c-4b0f-ab47-025487fabae3", "state": "normal", "checkpoint_tso": 449349056371884036, "checkpoint_time": "2024-04-26 19:23:36.653", "error": null }' { "upstream_id": 7362134856972947662, "namespace": "default", "id": "33bea42a-b66c-4b0f-ab47-025487fabae3", "state": "normal", "checkpoint_tso": 449349056371884036, "checkpoint_time": "2024-04-26 19:23:36.653", "error": null } ++ echo '{' '"upstream_id":' 7362134856972947662, '"namespace":' '"default",' '"id":' '"33bea42a-b66c-4b0f-ab47-025487fabae3",' '"state":' '"normal",' '"checkpoint_tso":' 449349056371884036, '"checkpoint_time":' '"2024-04-26' '19:23:36.653",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362134856972947662, '"namespace":' '"default",' '"id":' '"33bea42a-b66c-4b0f-ab47-025487fabae3",' '"state":' '"normal",' '"checkpoint_tso":' 449349056371884036, '"checkpoint_time":' '"2024-04-26' '19:23:36.653",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check diff failed 1-th time, retry later table row_format.finish_mark not exists for 4-th check, retry later table row_format.finish_mark not exists for 5-th check, retry later check diff failed 2-th time, retry later table row_format.finish_mark not exists for 6-th check, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:23:43 CST 2024] <<<<<< run test case processor_err_chan success! >>>>>> table row_format.finish_mark not exists for 7-th check, retry later Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z94t0-610hr --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "dd037c7455d9d9bf37a9c4b39920881d93b2fa91" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z94t0" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: 
"11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-4851m-l5ghv --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1fc22a2921cedc621ab44bd1b1ff40191a93f774" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-4851m" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" 
readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test table row_format.finish_mark not exists for 8-th check, retry later table row_format.finish_mark not exists for 9-th check, retry later table row_format.finish_mark not exists for 10-th check, retry later table row_format.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:23:53 CST 2024] <<<<<< run test case row_format success! 
>>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_reconstruct/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2x98t-mpqzv --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1fc43d7610e00f30086545ad3e40282cbd216ba3" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2x98t" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" 
image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test start tidb cluster in /tmp/tidb_cdc_test/changefeed_reconstruct Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 31.41 secs (118334120 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] } [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
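The shell trace above (nc -z localhost 2181, nc -z localhost 9092, then echo dump piped through nc and grep) is the broker-readiness gate for this worker: it probes ZooKeeper and Kafka over TCP, then sends ZooKeeper's "dump" four-letter command and looks for the ephemeral node /brokers/ids/1 that broker 1 registers. A minimal sketch of that gate, assuming the same localhost ports; the function name and the retry loops are illustrative additions, the trace itself runs each probe once under the 6-minute timeout:

  wait_for_kafka_broker() {
    # TCP probes for ZooKeeper and the Kafka plaintext listener
    until nc -z localhost 2181; do sleep 1; done
    until nc -z localhost 9092; do sleep 1; done
    # ZooKeeper's "dump" 4lw lists ephemeral nodes; broker 1 is ready once /brokers/ids/1 appears
    until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
      sleep 1
    done
  }
  wait_for_kafka_broker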
The recommended git tool is: git [Pipeline] // timeout The recommended git tool is: git [Pipeline] } [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6245fb85; decorates RemoteLauncher[hudson.remoting.Channel@26f84785:JNLP4-connect connection from 10.233.107.148/10.233.107.148:52512] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3d22bd9; decorates RemoteLauncher[hudson.remoting.Channel@7129056f:JNLP4-connect connection from 10.233.71.201/10.233.71.201:53300] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@48bcee67; decorates RemoteLauncher[hudson.remoting.Channel@1922fdea:JNLP4-connect connection from 10.233.90.118/10.233.90.118:52650] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. 
Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68981f40000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:4400, start at 2024-04-26 19:24:11.100276276 +0800 CST m=+5.012651460 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:11.108 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:11.088 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:11.088 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68981f40000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:4400, start at 2024-04-26 19:24:11.100276276 +0800 CST m=+5.012651460 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:11.108 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:11.088 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:11.088 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689823140014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:4483, start at 2024-04-26 19:24:11.360757756 +0800 CST m=+5.224122004 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:11.367 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:24:11.333 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:11.333 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Commit message: "feat(tidb): test flashbacktest package (#2942)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 [Fri Apr 26 19:24:16 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get 
info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.58955897.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver1 --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:24:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/88d17632-d452-4ae3-8525-4a03693bbdb2 {"id":"88d17632-d452-4ae3-8525-4a03693bbdb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e440cc 88d17632-d452-4ae3-8525-4a03693bbdb2 /tidb/cdc/default/default/upstream/7362135048827198390 {"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/88d17632-d452-4ae3-8525-4a03693bbdb2 {"id":"88d17632-d452-4ae3-8525-4a03693bbdb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e440cc 88d17632-d452-4ae3-8525-4a03693bbdb2 /tidb/cdc/default/default/upstream/7362135048827198390 {"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/88d17632-d452-4ae3-8525-4a03693bbdb2 {"id":"88d17632-d452-4ae3-8525-4a03693bbdb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e440cc 88d17632-d452-4ae3-8525-4a03693bbdb2 /tidb/cdc/default/default/upstream/7362135048827198390 {"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:24:19 CST 2024] <<<<<< START kafka consumer in changefeed_reconstruct case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
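The block traced above, from '+ curl_status_cmd=...' through '+ break + set +x', is the CDC server readiness loop these cases share: it polls http://127.0.0.1:8300/debug/info for up to 50 attempts, three seconds apart, and treats the server as up once the response contains 'etcd info' and does not contain 'failed to get info:'. Condensed into a sketch (the helper name is illustrative; the actual loop appears to be inlined shell in the test harness):

  wait_cdc_ready() {
    local i=0 res
    while (( i <= 50 )); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info)
      # ready once the debug page shows the etcd metadata and no fetch error
      if ! echo "$res" | grep -q 'failed to get info:' && echo "$res" | grep -q 'etcd info'; then
        return 0
      fi
      [ "$i" -eq 50 ] && return 1   # give up after 50 attempts
      sleep 3
      i=$(( i + 1 ))
    done
  }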
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ***************** properties ***************** "workload"="core" "readallfields"="true" "mysql.db"="changefeed_reconstruct" "operationcount"="0" "mysql.port"="4000" "updateproportion"="0" "recordcount"="50" "scanproportion"="0" "threadcount"="4" "mysql.user"="root" "readproportion"="0" "requestdistribution"="uniform" "dotransactions"="false" "mysql.host"="127.0.0.1" "insertproportion"="0" ********************************************** Run finished, takes 17.104258ms INSERT - Takes(s): 0.0, Count: 47, OPS: 3668.0, Avg(us): 1280, Min(us): 867, Max(us): 4171, 95th(us): 5000, 99th(us): 5000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later table changefeed_reconstruct.usertable exists check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully wait process 5900 exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c6898f9500018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:4281, start at 2024-04-26 19:24:25.091406767 +0800 CST m=+5.131384910 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:25.098 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:25.093 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:25.093 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c6898f9500018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:4281, start at 2024-04-26 19:24:25.091406767 +0800 CST m=+5.131384910 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:25.098 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:25.093 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:25.093 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... 
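The ERROR 2003 lines here are expected while the TiDB servers are still coming up: the "Verifying ... TiDB is started" steps retry a query until the server answers, and the VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows that follow are the contents of the mysql.tidb system table (GC leader, GC run interval, GC safe point, and so on). A rough sketch of that kind of probe, assuming the upstream TiDB listens on 4000 with a passwordless root user as in the go-ycsb properties above; the real check in the harness may differ:

  # Poll until TiDB answers, then dump mysql.tidb -- the table whose rows appear in this log.
  until mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;' 2>/dev/null; do
    sleep 1
  done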
VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c6898fb8c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:4371, start at 2024-04-26 19:24:25.232577898 +0800 CST m=+5.213944830 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:25.239 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:25.237 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:25.237 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process 5900 exit for 2-th time... 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5900) - No such process wait process 5900 exit for 3-th time... process 5900 already exit check_no_capture http://127.0.0.1:2379 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 12.38 secs (300148520 bytes/sec) [Pipeline] { [Pipeline] // container [Pipeline] sh parse error: Invalid numeric literal at line 1, column 6 run task successfully [Fri Apr 26 19:24:26 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.62126214.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver2 --cluster-id default --addr 127.0.0.1:8300 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=387f7fbf-244e-40bd-8dd5-3a9aaeb450f4 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2k3wm GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2k3wm pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2k3wm-m4v4j GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:24:27 CST 2024] <<<<<< run test case owner_resign success! >>>>>> + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... 
+ nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + + echo dump nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] // container [Pipeline] sh + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5673.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G10 Run cases: default_value simple cdc_server_tips event_filter sql_mode [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=ee06c1dc-509b-40dc-a52a-1f84accb21f6 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G10 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-3glp8 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-3glp8 pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: kafka... <<================= + set +x + tso='449349069909262337 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' 
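The tso value captured above carries the "PASS coverage: ..." suffix that the coverage-instrumented cdc.test binary appends to stdout, which is why the next trace pipes it through awk to keep only the first field. A condensed sketch of that extraction (the coverprofile path is a placeholder):

    # Query a start TSO from PD; keep only the first whitespace-separated field,
    # discarding the coverage summary printed by the instrumented binary.
    raw=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.example.cli.out \
          cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo "$raw" | awk -F ' ' '{print $1}' | head -n1)
    echo "using --start-ts=$start_ts"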
+ echo 449349069909262337 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:24:29 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.57165718.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:24:29 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/5da7e752-ba99-4306-894e-0da6f713cfd0 {UpstreamID:7362135048827198390 Namespace:default ID:5da7e752-ba99-4306-894e-0da6f713cfd0 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:24:19.695262569 +0800 CST StartTs:449349067613929477 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0039ae900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349067653251073} {CheckpointTs:449349068820054020 MinTableBarrierTs:449349070130511876 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349068820054020, checkpointTs: 449349068820054020, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e {"id":"30bbd0bc-7d34-41f0-8433-e2c7be7bac2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130667} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e44173 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e /tidb/cdc/default/default/changefeed/info/5da7e752-ba99-4306-894e-0da6f713cfd0 
{"upstream-id":7362135048827198390,"namespace":"default","changefeed-id":"5da7e752-ba99-4306-894e-0da6f713cfd0","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:24:19.695262569+08:00","start-ts":449349067613929477,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349067653251073} /tidb/cdc/default/default/changefeed/status/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":449349068820054020,"min-table-barrier-ts":449349070130511876,"admin-job-type":0} /tidb/cdc/default/default/task/position/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135048827198390 {"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/5da7e752-ba99-4306-894e-0da6f713cfd0 {UpstreamID:7362135048827198390 Namespace:default ID:5da7e752-ba99-4306-894e-0da6f713cfd0 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:24:19.695262569 +0800 CST StartTs:449349067613929477 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0039ae900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349067653251073} {CheckpointTs:449349068820054020 MinTableBarrierTs:449349070130511876 AdminJobType:noop} span: 
{table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349068820054020, checkpointTs: 449349068820054020, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e {"id":"30bbd0bc-7d34-41f0-8433-e2c7be7bac2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130667} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e44173 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e /tidb/cdc/default/default/changefeed/info/5da7e752-ba99-4306-894e-0da6f713cfd0 {"upstream-id":7362135048827198390,"namespace":"default","changefeed-id":"5da7e752-ba99-4306-894e-0da6f713cfd0","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:24:19.695262569+08:00","start-ts":449349067613929477,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349067653251073} /tidb/cdc/default/default/changefeed/status/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":449349068820054020,"min-table-barrier-ts":449349070130511876,"admin-job-type":0} /tidb/cdc/default/default/task/position/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135048827198390 
{"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/5da7e752-ba99-4306-894e-0da6f713cfd0 {UpstreamID:7362135048827198390 Namespace:default ID:5da7e752-ba99-4306-894e-0da6f713cfd0 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:24:19.695262569 +0800 CST StartTs:449349067613929477 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0039ae900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349067653251073} {CheckpointTs:449349068820054020 MinTableBarrierTs:449349070130511876 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349068820054020, checkpointTs: 449349068820054020, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e {"id":"30bbd0bc-7d34-41f0-8433-e2c7be7bac2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130667} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a25e44173 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e /tidb/cdc/default/default/changefeed/info/5da7e752-ba99-4306-894e-0da6f713cfd0 {"upstream-id":7362135048827198390,"namespace":"default","changefeed-id":"5da7e752-ba99-4306-894e-0da6f713cfd0","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-15210?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:24:19.695262569+08:00","start-ts":449349067613929477,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span
":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349067653251073} /tidb/cdc/default/default/changefeed/status/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":449349068820054020,"min-table-barrier-ts":449349070130511876,"admin-job-type":0} /tidb/cdc/default/default/task/position/30bbd0bc-7d34-41f0-8433-e2c7be7bac2e/5da7e752-ba99-4306-894e-0da6f713cfd0 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning"+ grep -q 'etcd info' :null} /tidb/cdc/default/default/upstream/7362135048827198390 {"id":7362135048827198390,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1 | grep id "id": "30bbd0bc-7d34-41f0-8433-e2c7be7bac2e", "cluster-id": "default" run task successfully The 1 times to try to start tidb cluster... capture_id: 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e check_processor_table_count http://127.0.0.1:2379 5da7e752-ba99-4306-894e-0da6f713cfd0 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e 1 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:24:31 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! >>>>>> run task successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:24:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41054e4e-cbaf-4b16-8a76-19e5f70f69fb {"id":"41054e4e-cbaf-4b16-8a76-19e5f70f69fb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130670} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a261484d0 41054e4e-cbaf-4b16-8a76-19e5f70f69fb /tidb/cdc/default/default/upstream/7362135110073860155 {"id":7362135110073860155,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41054e4e-cbaf-4b16-8a76-19e5f70f69fb {"id":"41054e4e-cbaf-4b16-8a76-19e5f70f69fb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130670} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a261484d0 41054e4e-cbaf-4b16-8a76-19e5f70f69fb /tidb/cdc/default/default/upstream/7362135110073860155 {"id":7362135110073860155,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/41054e4e-cbaf-4b16-8a76-19e5f70f69fb {"id":"41054e4e-cbaf-4b16-8a76-19e5f70f69fb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130670} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a261484d0 41054e4e-cbaf-4b16-8a76-19e5f70f69fb /tidb/cdc/default/default/upstream/7362135110073860155 {"id":7362135110073860155,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5776.out cli changefeed create --start-ts=449349069909262337 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-14384?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 5434114d-0fa1-4489-ae9b-4610db81029a Info: {"upstream_id":7362135110073860155,"namespace":"default","id":"5434114d-0fa1-4489-ae9b-4610db81029a","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-14384?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:24:33.307075269+08:00","start_ts":449349069909262337,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349069909262337,"checkpoint_ts":449349069909262337,"checkpoint_time":"2024-04-26 19:24:28.294"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check_processor_table_count http://127.0.0.1:2379 5da7e752-ba99-4306-894e-0da6f713cfd0 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e 0 table count 1 does equal to expected count 0 run task failed 1-th time, retry later + set +x [Fri Apr 26 19:24:34 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>> start tidb cluster in /tmp/tidb_cdc_test/default_value Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
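The changefeed created above points at a Kafka topic with the open-protocol encoder. Stripped of the coverage instrumentation, an equivalent invocation looks roughly like this; the topic name and start-ts are placeholders, and PD is assumed to be the default http://127.0.0.1:2379:

    # Create a Kafka changefeed; encoder protocol, partition count, broker version
    # and max message size travel as query parameters on the sink URI.
    cdc cli changefeed create \
      --start-ts="$start_ts" \
      --sink-uri='kafka://127.0.0.1:9092/ticdc-example-topic-0?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'

Note that the 10485760-byte max-message-bytes in the URI stays below the broker's KAFKA_MESSAGE_MAX_BYTES of 11534336 configured in the pod template, so producer batches fit under the broker limit.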
table drop_tables.c not exists for 1-th check, retry later
check_processor_table_count http://127.0.0.1:2379 5da7e752-ba99-4306-894e-0da6f713cfd0 30bbd0bc-7d34-41f0-8433-e2c7be7bac2e 0
run task successfully
***************** properties *****************
"scanproportion"="0"
"mysql.port"="4000"
"dotransactions"="false"
"requestdistribution"="uniform"
"workload"="core"
"readallfields"="true"
"operationcount"="0"
"recordcount"="50"
"mysql.host"="127.0.0.1"
"insertproportion"="0"
"mysql.db"="changefeed_reconstruct"
"threadcount"="4"
"readproportion"="0"
"updateproportion"="0"
"mysql.user"="root"
**********************************************
Run finished, takes 22.996141ms
INSERT - Takes(s): 0.0, Count: 48, OPS: 2543.6, Avg(us): 1672, Min(us): 908, Max(us): 5702, 95th(us): 5000, 99th(us): 6000
table changefeed_reconstruct.usertable not exists for 1-th check, retry later
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_hang/run.sh using Sink-Type: kafka... <<=================
[Fri Apr 26 19:24:36 CST 2024] <<<<<< run test case sink_hang success! >>>>>>
table drop_tables.c not exists for 2-th check, retry later
table changefeed_reconstruct.usertable exists
check diff failed 1-th time, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732
Git Commit Branch: master
UTC Build Time: 2024-04-25 21:05:46
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
Starting Downstream TiKV...
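The repeated "not exists for N-th check, retry later" and "check diff failed 1-th time, retry later" lines above all come from the same retry-until-success idiom used throughout these tests. A minimal sketch of that idiom; the function name, attempt budget, and the example check are illustrative, not taken from the harness:

    # Re-run a check every few seconds; give up after a fixed number of attempts.
    retry_check() {
      local max=$1; shift
      local i
      for ((i = 1; i <= max; i++)); do
        if "$@" >/dev/null 2>&1; then
          echo "run task successfully"
          return 0
        fi
        echo "run task failed $i-th time, retry later"
        sleep 2
      done
      return 1
    }

    # e.g. wait for the downstream to have replicated a table before diffing it
    retry_check 30 mysql -h 127.0.0.1 -P 3306 -u root -e 'SELECT 1 FROM drop_tables.c LIMIT 1'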
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7vksj-z1jwm --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "de05116bf2e93ac45d105f4a02d218ee638b873d" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7vksj" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: 
"DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test table drop_tables.c not exists for 3-th check, retry later check diff successfully \033[0;36m<<< Run all test success >>>\033[0m Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... table drop_tables.c not exists for 4-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:24:42 CST 2024] <<<<<< run test case changefeed_reconstruct success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table drop_tables.c exists check diff successfully cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:24:45 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689a4ee80017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:1366, start at 2024-04-26 19:24:46.951319167 +0800 CST m=+5.198358486 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:46.958 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:46.956 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:46.956 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689a4ee80017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:1366, start at 2024-04-26 19:24:46.951319167 +0800 CST m=+5.198358486 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:26:46.958 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:24:46.956 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:14:46.956 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c689a4fb00014 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:1441, start at 2024-04-26 19:24:46.995081693 +0800 CST m=+5.187534363 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:26:47.001 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:24:47.005 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:14:47.005 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash Release Version: v8.2.0-alpha-13-g37701038f
Edition: Community
Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e
Git Branch: HEAD
UTC Build Time: 2024-04-26 10:25:26
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile: RELWITHDEBINFO
Compiler: clang++ 13.0.0
Raft Proxy
Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time: 2024-04-26 10:30:15
Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine: tiflash
Prometheus Prefix: tiflash_proxy_
Profile: release
Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
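The bootstrap and GC variables dumped above (tikv_gc_life_time, tikv_gc_safe_point, and so on) are rows of the mysql.tidb system table, so they can be re-checked at any point while the cluster is up. A sketch, assuming the upstream TiDB answers on the default port 4000:

    # Inspect the GC bookkeeping rows that the startup check prints.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc_%'"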
Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h32rl-15kb7 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "94231e161b324cf61f888dcfd8bb15a89a9be55f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h32rl" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: 
"/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg in 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 20.04 secs (185430948 bytes/sec) [Pipeline] { [Pipeline] cache + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2879.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449349076230078466 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349076230078466 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:24:53 CST 2024] <<<<<< START cdc server in default_value case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.29182920.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:24:56 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ab21cfc-7616-4a1c-9e29-c2a4214d2108 {"id":"5ab21cfc-7616-4a1c-9e29-c2a4214d2108","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130694} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2667c3d5 5ab21cfc-7616-4a1c-9e29-c2a4214d2108 /tidb/cdc/default/default/upstream/7362135203645190389 {"id":7362135203645190389,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ab21cfc-7616-4a1c-9e29-c2a4214d2108 {"id":"5ab21cfc-7616-4a1c-9e29-c2a4214d2108","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130694} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2667c3d5 5ab21cfc-7616-4a1c-9e29-c2a4214d2108 /tidb/cdc/default/default/upstream/7362135203645190389 {"id":7362135203645190389,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ab21cfc-7616-4a1c-9e29-c2a4214d2108 {"id":"5ab21cfc-7616-4a1c-9e29-c2a4214d2108","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130694} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2667c3d5 5ab21cfc-7616-4a1c-9e29-c2a4214d2108 /tidb/cdc/default/default/upstream/7362135203645190389 {"id":7362135203645190389,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2970.out cli changefeed create --start-ts=449349076230078466 '--sink-uri=kafka://127.0.0.1:9092/ticdc-default-value-test-27538?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_capture/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Create changefeed successfully! 
ID: e93706ea-53b8-4841-a8c6-4c674a9bc2a9 Info: {"upstream_id":7362135203645190389,"namespace":"default","id":"e93706ea-53b8-4841-a8c6-4c674a9bc2a9","sink_uri":"kafka://127.0.0.1:9092/ticdc-default-value-test-27538?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:24:57.525896216+08:00","start_ts":449349076230078466,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349076230078466,"checkpoint_ts":449349076230078466,"checkpoint_time":"2024-04-26 19:24:52.406"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
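The curl loop traced above is the harness's readiness probe for a freshly started cdc server: it polls /debug/info until the body contains "etcd info", treats "failed to get info:" as an error, and gives up after 50 attempts. A condensed sketch of the same logic; the failure handling (exit on error) is simplified here and not taken verbatim from the scripts:

    # Wait for the cdc server's status endpoint to report it is registered in etcd.
    for ((i = 0; i <= 50; i++)); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info)
      if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server reported an error" >&2
        exit 1
      fi
      if echo "$res" | grep -q 'etcd info'; then
        break   # server is up and its capture is visible in etcd
      fi
      if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time" >&2
        exit 1
      fi
      sleep 3
    done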
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z9nq8-bx7gt --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "3c9c71343d92ee480ed147096accd573887650f8" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z9nq8" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" 
readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test + set +x [Fri Apr 26 19:24:58 CST 2024] <<<<<< START kafka consumer in default_value case >>>>>> go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading golang.org/x/time v0.5.0 go: downloading golang.org/x/sync v0.7.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading 
google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/multi_capture Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/golang/snappy v0.0.4 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading golang.org/x/tools v0.20.0 go: 
downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading 
github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/felixge/httpsnoop v1.0.4 Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 18.08 secs (205512445 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bda2c0004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:6976, start at 2024-04-26 19:25:12.205140851 +0800 CST m=+6.010428987 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:12.212 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:12.203 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:12.203 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bcb540015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:7716, start at 2024-04-26 19:25:11.270762105 +0800 CST m=+5.123114216 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:11.279 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:11.253 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:11.253 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bcb540015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:7716, start at 2024-04-26 19:25:11.270762105 +0800 CST m=+5.123114216 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:11.279 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:25:11.253 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:11.253 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bd62c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:7791, start at 2024-04-26 19:25:11.969325046 +0800 CST m=+5.772388814 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:11.977 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:11.947 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:11.947 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
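The repeated "ERROR 2003 (HY000): Can't connect to MySQL server" lines above are expected: the "Verifying Upstream/Downstream TiDB is started..." step keeps retrying until TiDB accepts MySQL-protocol connections, and once it does the GC settings are dumped from mysql.tidb, which is what the VARIABLE_NAME / VARIABLE_VALUE / COMMENT listings are. A hypothetical sketch of that readiness loop, with the query, host and port assumed from what the log prints (port 4000 matches the mysql.port used by the workloads later in this run), not taken from the actual helper script:

# Hypothetical reconstruction of the TiDB readiness check; the real script is not
# shown in this log, only its output (ERROR 2003 retries, then the mysql.tidb dump).
check_tidb_ready() {
  local host=$1 port=$2
  for i in $(seq 1 60); do
    if mysql -u root -h "$host" -P "$port" -e 'SELECT * FROM mysql.tidb;' 2>/dev/null; then
      return 0
    fi
    sleep 1   # ERROR 2003 just means the server is not listening yet; retry
  done
  return 1
}
check_tidb_ready 127.0.0.1 4000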
Logging trace to /tmp/tidb_cdc_test/multi_capture/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_capture/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bda2c0004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:6976, start at 2024-04-26 19:25:12.205140851 +0800 CST m=+6.010428987 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:12.212 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:12.203 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:12.203 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689bce600015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:7062, start at 2024-04-26 19:25:11.495356029 +0800 CST m=+5.247928455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:11.503 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:11.499 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:11.499 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9119.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449349082219282439 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349082219282439 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:25:16 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.84478449.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ***************** properties ***************** "workload"="core" "requestdistribution"="uniform" "operationcount"="0" "mysql.host"="127.0.0.1" "readproportion"="0" "dotransactions"="false" "threadcount"="2" "readallfields"="true" "mysql.user"="root" "mysql.db"="multi_capture_1" "recordcount"="10" "insertproportion"="0" "updateproportion"="0" "scanproportion"="0" "mysql.port"="4000" ********************************************** Run finished, takes 8.263582ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2231.5, Avg(us): 1576, Min(us): 937, Max(us): 3672, 95th(us): 4000, 99th(us): 4000 ***************** properties ***************** "readallfields"="true" "scanproportion"="0" "readproportion"="0" "workload"="core" "mysql.host"="127.0.0.1" "insertproportion"="0" "updateproportion"="0" "recordcount"="10" "dotransactions"="false" "mysql.user"="root" "threadcount"="2" "operationcount"="0" "requestdistribution"="uniform" "mysql.db"="multi_capture_2" "mysql.port"="4000" ********************************************** Run finished, takes 8.008346ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2280.2, Avg(us): 1532, Min(us): 906, Max(us): 3537, 95th(us): 4000, 99th(us): 4000 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f524f0b4-5fca-42c5-aea8-b7b7d6460b75 {"id":"f524f0b4-5fca-42c5-aea8-b7b7d6460b75","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130716} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26d105c9 f524f0b4-5fca-42c5-aea8-b7b7d6460b75 /tidb/cdc/default/default/upstream/7362135314679518381 {"id":7362135314679518381,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f524f0b4-5fca-42c5-aea8-b7b7d6460b75 {"id":"f524f0b4-5fca-42c5-aea8-b7b7d6460b75","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130716} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26d105c9 f524f0b4-5fca-42c5-aea8-b7b7d6460b75 /tidb/cdc/default/default/upstream/7362135314679518381 {"id":7362135314679518381,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f524f0b4-5fca-42c5-aea8-b7b7d6460b75 {"id":"f524f0b4-5fca-42c5-aea8-b7b7d6460b75","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130716} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26d105c9 f524f0b4-5fca-42c5-aea8-b7b7d6460b75 /tidb/cdc/default/default/upstream/7362135314679518381 {"id":7362135314679518381,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:25:19 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>> ***************** properties ***************** "readproportion"="0" "workload"="core" "recordcount"="10" "insertproportion"="0" "dotransactions"="false" "updateproportion"="0" "mysql.port"="4000" "readallfields"="true" "mysql.host"="127.0.0.1" "scanproportion"="0" "threadcount"="2" "requestdistribution"="uniform" "operationcount"="0" "mysql.db"="multi_capture_3" "mysql.user"="root" ********************************************** Run finished, takes 8.141534ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2291.7, Avg(us): 1546, Min(us): 897, Max(us): 3665, 95th(us): 4000, 99th(us): 4000 table processor_stop_delay.t not exists for 1-th 
check, retry later ***************** properties ***************** "requestdistribution"="uniform" "scanproportion"="0" "threadcount"="2" "operationcount"="0" "mysql.db"="multi_capture_4" "insertproportion"="0" "recordcount"="10" "mysql.port"="4000" "readproportion"="0" "workload"="core" "dotransactions"="false" "mysql.host"="127.0.0.1" "mysql.user"="root" "updateproportion"="0" "readallfields"="true" ********************************************** Run finished, takes 8.294492ms INSERT - Takes(s): 0.0, Count: 9, OPS: 2054.1, Avg(us): 1329, Min(us): 890, Max(us): 3753, 95th(us): 4000, 99th(us): 4000 [Fri Apr 26 19:25:20 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92529254.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table processor_stop_delay.t not exists for 2-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 
/tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:25:24 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.93089310.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table processor_stop_delay.t exists check diff successfully check diff failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:27 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 
{"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:25:27 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.93639365.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8303; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 2-th time, retry later check diff failed 3-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8303 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:30 GMT < Content-Length: 1750 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e926c07-5531-4498-8bc0-8077a3f24f8f {"id":"2e926c07-5531-4498-8bc0-8077a3f24f8f","address":"127.0.0.1:8303","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130727} /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c712a5 2e926c07-5531-4498-8bc0-8077a3f24f8f /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e926c07-5531-4498-8bc0-8077a3f24f8f {"id":"2e926c07-5531-4498-8bc0-8077a3f24f8f","address":"127.0.0.1:8303","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130727} /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b 
/tidb/cdc/default/__cdc_meta__/owner/22318f1a26c712a5 2e926c07-5531-4498-8bc0-8077a3f24f8f /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e926c07-5531-4498-8bc0-8077a3f24f8f {"id":"2e926c07-5531-4498-8bc0-8077a3f24f8f","address":"127.0.0.1:8303","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130727} /tidb/cdc/default/__cdc_meta__/capture/54113a4b-4210-4cd7-bb16-371193aceb1b {"id":"54113a4b-4210-4cd7-bb16-371193aceb1b","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130724} /tidb/cdc/default/__cdc_meta__/capture/77bf6731-8cce-425f-9e5b-0aaf279d0df8 {"id":"77bf6731-8cce-425f-9e5b-0aaf279d0df8","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130721} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c7124d 77bf6731-8cce-425f-9e5b-0aaf279d0df8 /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c71285 54113a4b-4210-4cd7-bb16-371193aceb1b /tidb/cdc/default/__cdc_meta__/owner/22318f1a26c712a5 2e926c07-5531-4498-8bc0-8077a3f24f8f /tidb/cdc/default/default/upstream/7362135311697695248 {"id":7362135311697695248,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9416.out cli changefeed create --start-ts=449349082219282439 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-capture-test-4270?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8301 Create changefeed successfully! 
ID: 35dc7688-8e44-4493-afd2-d9aea1a2558c Info: {"upstream_id":7362135311697695248,"namespace":"default","id":"35dc7688-8e44-4493-afd2-d9aea1a2558c","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-capture-test-4270?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:25:30.638092575+08:00","start_ts":449349082219282439,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349082219282439,"checkpoint_ts":449349082219282439,"checkpoint_time":"2024-04-26 19:25:15.253"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 15.47 secs (240178377 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) + set +x [Fri Apr 26 19:25:32 CST 2024] <<<<<< START kafka consumer in multi_capture case >>>>>> table multi_capture_1.usertable not exists for 1-th check, retry later check diff failed 4-th time, retry later [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... 
+ nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] { [Pipeline] { [Pipeline] // cache [Pipeline] } [Pipeline] { table multi_capture_1.usertable not exists for 2-th check, retry later check diff failed 5-th time, retry later [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] sh [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@11fe21ed; decorates RemoteLauncher[hudson.remoting.Channel@55ada9f0:JNLP4-connect connection from 10.233.86.123/10.233.86.123:57410] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1e432acf; decorates RemoteLauncher[hudson.remoting.Channel@281e6695:JNLP4-connect connection from 10.233.68.192/10.233.68.192:56480] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@585ca54c; decorates RemoteLauncher[hudson.remoting.Channel@2aed2189:JNLP4-connect connection from 10.233.70.177/10.233.70.177:60706] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x 
./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G09 Run cases: gc_safepoint changefeed_pause_resume cli savepoint synced_status PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=bd4d12b1-f54b-4fc4-a6fe-a4d4632e8f14 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-4851m GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-4851m pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G04 [Pipeline] // timeout [Pipeline] } table multi_capture_1.usertable exists table multi_capture_2.usertable exists table multi_capture_3.usertable not exists for 1-th check, retry later check diff failed 6-th time, retry later Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=4b095cb0-a0c4-437e-b5c2-581ce20e798e BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z94t0 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z94t0 pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
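Note: each parallel worker in this build runs one shard of the integration suite: run_group.sh takes the sink type and a group id (G01, G04, G05, G09 in this job) and expands the group into the case list echoed after "Run cases:". A sketch of replaying one shard by hand, assuming a tiflow checkout with the prebuilt test binaries on PATH as in this job; that the group-to-case mapping lives inside run_group.sh is an assumption based on how it is invoked here:

  # Sketch: re-run the shard executed by this worker (Kafka sink, group G04).
  cd tiflow
  ./tests/integration_tests/run_group.sh kafka G04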
[Pipeline] // stage + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G05 [Pipeline] } [Pipeline] // container [Pipeline] } Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-llqb6-09pw9 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "9e5f88404deaa7ee87106b04863f884bc8130764" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-llqb6" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: 
"rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] // withEnv [Pipeline] } [Pipeline] { Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=a2573852-c1c6-4f64-98fb-4f42031cc241 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2x98t GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-2x98t pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] checkout The recommended git tool is: git [Pipeline] // stage [Pipeline] } Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@45ace8e7; decorates RemoteLauncher[hudson.remoting.Channel@7ecd533f:JNLP4-connect connection from 10.233.84.135/10.233.84.135:45976] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 table multi_capture_3.usertable exists table multi_capture_4.usertable not exists for 1-th check, retry later Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache check diff successfully wait process cdc.test exit for 1-th time... 
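Note: the "check diff failed N-th time, retry later" and "wait process cdc.test exit for N-th time..." messages come from polling helpers: a data-consistency check between upstream and downstream is retried until it passes, and the cdc.test process is polled until it has exited. A generic sketch of that pattern; the sync_diff_inspector invocation is an assumption about what the checker runs, not something this log confirms:

  # Sketch: bounded retry loop of the kind behind the messages above.
  retry() {  # usage: retry <max-attempts> <sleep-seconds> <command...>
    local max=$1 pause=$2; shift 2
    for ((i = 1; i <= max; i++)); do
      "$@" && return 0
      echo "check failed ${i}-th time, retry later"
      sleep "${pause}"
    done
    return 1
  }
  retry 30 3 sync_diff_inspector --config=./diff_config.toml     # assumed consistency checker
  retry 10 1 bash -c '! pgrep -x cdc.test > /dev/null'           # wait for cdc.test to exit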
> git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" Commit message: "feat(tidb): test flashbacktest package (#2942)" table multi_capture_4.usertable exists check diff failed 1-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:25:40 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>> > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Commit message: "feat(tidb): test flashbacktest package (#2942)" start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully ***************** properties ***************** "dotransactions"="false" "readproportion"="0" "recordcount"="20" "scanproportion"="0" "mysql.user"="root" "mysql.db"="multi_capture_1" "requestdistribution"="uniform" "insertproportion"="0" "mysql.host"="127.0.0.1" "readallfields"="true" "updateproportion"="0" "threadcount"="2" "operationcount"="0" "workload"="core" "mysql.port"="4000" ********************************************** Run finished, takes 10.955031ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2037.3, Avg(us): 946, Min(us): 466, Max(us): 1720, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "updateproportion"="0" "mysql.host"="127.0.0.1" "mysql.user"="root" "insertproportion"="0" "readproportion"="0" "recordcount"="20" "dotransactions"="false" "operationcount"="0" "scanproportion"="0" "readallfields"="true" "mysql.port"="4000" "threadcount"="2" "mysql.db"="multi_capture_2" "requestdistribution"="uniform" "workload"="core" ********************************************** Run finished, takes 9.508313ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2311.5, Avg(us): 818, Min(us): 430, Max(us): 1413, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "insertproportion"="0" "workload"="core" "threadcount"="2" "operationcount"="0" "mysql.user"="root" "recordcount"="20" "readproportion"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "updateproportion"="0" "scanproportion"="0" "mysql.db"="multi_capture_3" "readallfields"="true" "mysql.port"="4000" "dotransactions"="false" ********************************************** Run finished, takes 9.631061ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2286.1, Avg(us): 804, Min(us): 397, Max(us): 1541, 95th(us): 2000, 
99th(us): 2000 ***************** properties ***************** "requestdistribution"="uniform" "dotransactions"="false" "operationcount"="0" "mysql.host"="127.0.0.1" "scanproportion"="0" "workload"="core" "readallfields"="true" "mysql.db"="multi_capture_4" "recordcount"="20" "readproportion"="0" "mysql.port"="4000" "updateproportion"="0" "threadcount"="2" "insertproportion"="0" "mysql.user"="root" ********************************************** Run finished, takes 21.116277ms INSERT - Takes(s): 0.0, Count: 20, OPS: 1483.4, Avg(us): 2047, Min(us): 699, Max(us): 7622, 95th(us): 8000, 99th(us): 8000 check diff failed 1-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c
Edition: Community
Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb
Git Branch: master
UTC Build Time: 2024-04-26 09:54:14
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 3-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Fri Apr 26 19:25:50 CST 2024] <<<<<< run test case multi_capture success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c689e34e40003 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:1344, start at 2024-04-26 19:25:50.777887777 +0800 CST m=+5.158518909 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:27:50.785 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:25:50.777 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:15:50.777 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c689e34e40003 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:1344, start at 2024-04-26 19:25:50.777887777 +0800 CST m=+5.158518909 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:27:50.785 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:25:50.777 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:15:50.777 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c689e34d40017 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:1427, start at 2024-04-26 19:25:50.806912789 +0800 CST m=+5.132033266 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:27:50.813 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:25:50.773 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:15:50.773 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash Release Version: v8.2.0-alpha-13-g37701038f
Edition: Community
Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e
Git Branch: HEAD
UTC Build Time: 2024-04-26 10:25:26
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile: RELWITHDEBINFO
Compiler: clang++ 13.0.0
Raft Proxy
Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time: 2024-04-26 10:30:15
Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine: tiflash
Prometheus Prefix: tiflash_proxy_
Profile: release
Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
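Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT listings above are the bootstrap and GC-safepoint rows that the startup check reads from each TiDB server's mysql.tidb table. A sketch of inspecting the same rows by hand against the upstream TiDB on port 4000 (host, port and passwordless root match this environment; the exact query the harness runs is an assumption):

  # Sketch: read the GC/bootstrap state that the startup check prints.
  mysql -h 127.0.0.1 -P 4000 -u root -e \
    'SELECT * FROM mysql.tidb WHERE VARIABLE_NAME LIKE "tikv_gc%" OR VARIABLE_NAME = "bootstrapped";'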
Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 12.26 secs (303166947 bytes/sec) [Pipeline] { VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e2efc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:1411, start at 2024-04-26 19:25:50.445164139 +0800 CST m=+5.198133621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:50.451 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:50.449 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:50.449 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e2efc0015 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:1411, start at 2024-04-26 19:25:50.445164139 +0800 CST m=+5.198133621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:50.451 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:50.449 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:50.449 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e30700017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:1493, start at 2024-04-26 19:25:50.532639821 +0800 CST m=+5.234031763 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:50.539 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:50.542 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:50.542 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2824.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] sh [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] { [Pipeline] } [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] // container [Pipeline] sh [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G01 Run cases: http_api http_api_tls api_v2 http_api_tls_with_user_auth cli_tls_with_auth kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check canal_json_storage_basic canal_json_storage_partition_table multi_tables_ddl PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=83cf8bc2-2c5f-49f2-93f4-b83dd823f31a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7vksj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-7vksj pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:25:55 CST 2024] <<<<<< run test case http_api success! >>>>>> + set +x + tso='449349092449189889 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349092449189889 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:25:55 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28592861.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Fri Apr 26 19:25:55 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28342836.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e86200009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:1321, start at 2024-04-26 19:25:55.994541185 +0800 CST m=+7.724607455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:56.012 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:56.026 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:56.026 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:58 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1fc6543a-4110-408d-98c8-e0f2614ae574 {"id":"1fc6543a-4110-408d-98c8-e0f2614ae574","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130755} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2761bcca 1fc6543a-4110-408d-98c8-e0f2614ae574 /tidb/cdc/default/default/upstream/7362135480874022932 {"id":7362135480874022932,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1fc6543a-4110-408d-98c8-e0f2614ae574 {"id":"1fc6543a-4110-408d-98c8-e0f2614ae574","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130755} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2761bcca 1fc6543a-4110-408d-98c8-e0f2614ae574 /tidb/cdc/default/default/upstream/7362135480874022932 {"id":7362135480874022932,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e86200009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:1321, start at 2024-04-26 19:25:55.994541185 +0800 CST m=+7.724607455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:56.012 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:56.026 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:56.026 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. 
Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689e8940000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:1365, start at 2024-04-26 19:25:56.202970112 +0800 CST m=+7.849334969 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:27:56.223 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:25:56.226 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:15:56.226 +0800 All versions after safe point can be accessed. (DO NOT EDIT) + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1fc6543a-4110-408d-98c8-e0f2614ae574 {"id":"1fc6543a-4110-408d-98c8-e0f2614ae574","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130755} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2761bcca 1fc6543a-4110-408d-98c8-e0f2614ae574 /tidb/cdc/default/default/upstream/7362135480874022932 {"id":7362135480874022932,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:25:58 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>> Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
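The trace above shows the harness polling a freshly started cdc server: it curls http://127.0.0.1:8300/debug/info, retries up to 50 times with a 3-second sleep, and stops once the response contains "etcd info". A minimal sketch of that wait loop, assuming the same endpoint and markers (the function name is hypothetical; the real helper under tests/integration_tests/_utils may differ in details such as error handling):

wait_for_cdc_ready() {
    # Poll the cdc server status endpoint until it reports readiness, as in the trace above.
    local url="http://127.0.0.1:8300/debug/info"
    local i res
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "$url" 2>&1 || true)
        # "failed to get info:" in the body is treated here as a hard failure (assumption).
        if echo "$res" | grep -q 'failed to get info:'; then
            return 1
        fi
        # "etcd info" means the capture has registered itself in etcd, so the server is usable.
        if echo "$res" | grep -q 'etcd info'; then
            return 0
        fi
        if ((i == 50)); then
            return 1
        fi
        sleep 3
    done
}

In the log the same pattern appears once per test case (foreign_key, gc_safepoint, charset_gbk, partition_table), each time right after the cdc.test server process is launched in the background.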
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/partition_table Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 0 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:25:58 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd50cb91-61f3-4c24-b8e8-ae7336767e4b {"id":"fd50cb91-61f3-4c24-b8e8-ae7336767e4b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130756} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276335cf fd50cb91-61f3-4c24-b8e8-ae7336767e4b /tidb/cdc/default/default/upstream/7362135478122314476 {"id":7362135478122314476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd50cb91-61f3-4c24-b8e8-ae7336767e4b {"id":"fd50cb91-61f3-4c24-b8e8-ae7336767e4b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130756} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276335cf fd50cb91-61f3-4c24-b8e8-ae7336767e4b /tidb/cdc/default/default/upstream/7362135478122314476 {"id":7362135478122314476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fd50cb91-61f3-4c24-b8e8-ae7336767e4b {"id":"fd50cb91-61f3-4c24-b8e8-ae7336767e4b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130756} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276335cf fd50cb91-61f3-4c24-b8e8-ae7336767e4b /tidb/cdc/default/default/upstream/7362135478122314476 {"id":7362135478122314476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2920.out cli changefeed create --start-ts=449349092449189889 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-23746?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: f86ddec0-20f7-4ed8-ad03-723c1c23caeb Info: {"upstream_id":7362135478122314476,"namespace":"default","id":"f86ddec0-20f7-4ed8-ad03-723c1c23caeb","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-23746?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:25:59.285253195+08:00","start_ts":449349092449189889,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349092449189889,"checkpoint_ts":449349092449189889,"checkpoint_time":"2024-04-26 19:25:54.277"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 1-th time, retry later + set +x [Fri Apr 26 19:26:00 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>> [Fri Apr 26 19:26:01 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.27012703.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Verifying downstream PD is started... check diff failed 2-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo The 1 times to try to start tidb cluster... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:26:03 CST 2024] <<<<<< run test case http_api_tls success! 
>>>>>> check diff successfully check_safepoint_forward http://127.0.0.1:2379 7362135480874022932 449349094814253059 449349093621497857 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:26:04 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a522da2b-9dce-425b-b01c-c0299e6ce6ba {"id":"a522da2b-9dce-425b-b01c-c0299e6ce6ba","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130761} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276819d1 a522da2b-9dce-425b-b01c-c0299e6ce6ba /tidb/cdc/default/default/upstream/7362135494312101565 {"id":7362135494312101565,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a522da2b-9dce-425b-b01c-c0299e6ce6ba {"id":"a522da2b-9dce-425b-b01c-c0299e6ce6ba","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130761} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276819d1 a522da2b-9dce-425b-b01c-c0299e6ce6ba /tidb/cdc/default/default/upstream/7362135494312101565 {"id":7362135494312101565,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a522da2b-9dce-425b-b01c-c0299e6ce6ba {"id":"a522da2b-9dce-425b-b01c-c0299e6ce6ba","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130761} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a276819d1 a522da2b-9dce-425b-b01c-c0299e6ce6ba /tidb/cdc/default/default/upstream/7362135494312101565 {"id":7362135494312101565,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x Create changefeed successfully! 
ID: 407ea580-9640-4aa5-b343-8787bdbdc6ec Info: {"upstream_id":7362135494312101565,"namespace":"default","id":"407ea580-9640-4aa5-b343-8787bdbdc6ec","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-04-26T19:26:04.472745382+08:00","start_ts":449349094218137601,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349094218137601,"checkpoint_ts":449349094218137601,"checkpoint_time":"2024-04-26 19:26:01.025"} [Fri Apr 26 19:26:04 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
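Each case above registers its changefeed with `cdc cli changefeed create`, passing a start TSO and a sink URI whose query string carries the protocol, partition count, Kafka version, and message-size limit (see the foreign_key and partition_table traces for the Kafka form). A minimal sketch of that invocation; the topic and the PD flag value are placeholders, while the query parameters mirror the traces:

# Hypothetical topic name; parameter values copied from the traces above.
START_TS=449349092449189889   # obtained earlier via `cdc cli tso query`
TOPIC="ticdc-example-test-1"
SINK_URI="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --start-ts="${START_TS}" \
    --sink-uri="${SINK_URI}"

On success the CLI prints "Create changefeed successfully!" followed by the full effective configuration as JSON, which is what the large Info: blocks in this log are.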
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully check_changefeed_state http://127.0.0.1:2379 91b3cf6d-977b-4a5c-b5ce-f167209d805d stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=91b3cf6d-977b-4a5c-b5ce-f167209d805d + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 91b3cf6d-977b-4a5c-b5ce-f167209d805d -s + info='{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' + echo '{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' { "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null } ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"stopped",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"stopped",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7362135480874022932 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/api_v2/run.sh using Sink-Type: kafka... <<================= start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
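The check_changefeed_state trace above queries the changefeed summary with `cdc cli changefeed query -s` and extracts the state and error message from the JSON with jq. A condensed sketch of that check, assuming the summary output shape shown in the trace (the real helper also takes an expected error message and an optional TLS directory):

check_changefeed_state_sketch() {
    # Usage: check_changefeed_state_sketch <pd-endpoint> <changefeed-id> <expected-state>
    local pd=$1 id=$2 expected=$3
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [ "$state" != "$expected" ]; then
        echo "expected state ${expected}, got ${state} (error: ${message})" >&2
        return 1
    fi
}

In the gc_safepoint case this is called repeatedly as the changefeed is paused and resumed, which is why the log alternates between "stopped" and "normal" checks around the same changefeed ID.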
run task successfully check_changefeed_state http://127.0.0.1:2379 91b3cf6d-977b-4a5c-b5ce-f167209d805d normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=91b3cf6d-977b-4a5c-b5ce-f167209d805d + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 91b3cf6d-977b-4a5c-b5ce-f167209d805d -s ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + info='{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "normal", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' + echo '{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "normal", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' { "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "normal", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null } ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"normal",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"normal",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7362135480874022932 449349095076397059 449349095076397060 Verifying downstream PD is started... 
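The start TSO fed into changefeed creation is captured in several traces in this group (for example the partition_table case below) by running `cdc cli tso query` and keeping only the first field, because the coverage-instrumented binary appends a "PASS coverage: ..." summary to stdout. A minimal sketch of that capture, assuming the same output shape; the `head -n1` guard is an addition for robustness, not part of the original helper:

# `cdc cli tso query` prints the TSO followed by a Go coverage summary,
# so keep only the first whitespace-separated field of the first line.
tso=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' '{print $1}' | head -n1)
echo "start-ts: ${tso}"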
run task successfully table foreign_key.finish_mark not exists for 1-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 12.49 secs (297549776 bytes/sec) [Pipeline] { [Pipeline] cache check_changefeed_state http://127.0.0.1:2379 91b3cf6d-977b-4a5c-b5ce-f167209d805d stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=91b3cf6d-977b-4a5c-b5ce-f167209d805d + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 91b3cf6d-977b-4a5c-b5ce-f167209d805d -s + info='{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' + echo '{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null }' { "upstream_id": 7362135480874022932, "namespace": "default", "id": "91b3cf6d-977b-4a5c-b5ce-f167209d805d", "state": "stopped", "checkpoint_tso": 449349095076397060, "checkpoint_time": "2024-04-26 19:26:04.299", "error": null } ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"stopped",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"91b3cf6d-977b-4a5c-b5ce-f167209d805d",' '"state":' '"stopped",' '"checkpoint_tso":' 449349095076397060, '"checkpoint_time":' '"2024-04-26' '19:26:04.299",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully table foreign_key.finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_state http://127.0.0.1:2379 f13c77db-f9f2-4950-9f4a-bc14621b3f37 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=f13c77db-f9f2-4950-9f4a-bc14621b3f37 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c f13c77db-f9f2-4950-9f4a-bc14621b3f37 -s find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls_with_user_auth/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:26:11 CST 2024] <<<<<< run test case http_api_tls_with_user_auth success! 
>>>>>> + info='{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "f13c77db-f9f2-4950-9f4a-bc14621b3f37", "state": "normal", "checkpoint_tso": 449349096858976261, "checkpoint_time": "2024-04-26 19:26:11.099", "error": null }' + echo '{ "upstream_id": 7362135480874022932, "namespace": "default", "id": "f13c77db-f9f2-4950-9f4a-bc14621b3f37", "state": "normal", "checkpoint_tso": 449349096858976261, "checkpoint_time": "2024-04-26 19:26:11.099", "error": null }' { "upstream_id": 7362135480874022932, "namespace": "default", "id": "f13c77db-f9f2-4950-9f4a-bc14621b3f37", "state": "normal", "checkpoint_tso": 449349096858976261, "checkpoint_time": "2024-04-26 19:26:11.099", "error": null } ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"f13c77db-f9f2-4950-9f4a-bc14621b3f37",' '"state":' '"normal",' '"checkpoint_tso":' 449349096858976261, '"checkpoint_time":' '"2024-04-26' '19:26:11.099",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362135480874022932, '"namespace":' '"default",' '"id":' '"f13c77db-f9f2-4950-9f4a-bc14621b3f37",' '"state":' '"normal",' '"checkpoint_tso":' 449349096858976261, '"checkpoint_time":' '"2024-04-26' '19:26:11.099",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7362135480874022932 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 3-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689f89080019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:10138, start at 2024-04-26 19:26:12.586857649 +0800 CST m=+7.701839624 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:12.592 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:12.596 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:12.596 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully table foreign_key.finish_mark not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689f89080019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:10138, start at 2024-04-26 19:26:12.586857649 +0800 CST m=+7.701839624 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:12.592 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:12.596 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:12.596 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689f63a40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6hf3q-f799q, pid:10222, start at 2024-04-26 19:26:10.190883533 +0800 CST m=+5.252730406 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:10.197 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:10.153 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:10.153 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... Changefeed remove successfully. ID: 91b3cf6d-977b-4a5c-b5ce-f167209d805d CheckpointTs: 449349095076397060 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-1596?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_forward http://127.0.0.1:2379 7362135480874022932 449349097697837060 449349095076397060 449349096858976261 TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later run task successfully Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6xvpn-s11rw --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "74c220b3d6091a883f0d79ff4643b00569f35baa" jenkins/label: 
"pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6xvpn" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: 
"MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Changefeed remove successfully. ID: f13c77db-f9f2-4950-9f4a-bc14621b3f37 CheckpointTs: 449349097959981061 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-1596?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_cleared http://127.0.0.1:2379 7362135480874022932 run task successfully + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11585.out cli tso query --pd=http://127.0.0.1:2379 table test.finish_mark not exists for 2-th check, retry later wait process cdc.test exit for 1-th time... table foreign_key.finish_mark not exists for 5-th check, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:26:18 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>> + set +x + tso='449349098549018625 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349098549018625 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:26:19 CST 2024] <<<<<< START cdc server in partition_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1162911631.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.finish_mark not exists for 3-th check, retry later table foreign_key.finish_mark not exists for 6-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 7.45 secs (498790970 bytes/sec) [Pipeline] { [Pipeline] cache table test.finish_mark not exists for 4-th check, retry later table foreign_key.finish_mark not exists for 7-th check, retry later Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-1hp4d-h44v3 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1548a9b79fdf76aa468b8624e2d6cf4b54349697" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-1hp4d" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: 
"KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" 
securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:26:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ef9bf7f-c007-498a-9af4-70bc35f925f6 {"id":"5ef9bf7f-c007-498a-9af4-70bc35f925f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130779} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27ac54d9 5ef9bf7f-c007-498a-9af4-70bc35f925f6 /tidb/cdc/default/default/upstream/7362135555638490979 {"id":7362135555638490979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ef9bf7f-c007-498a-9af4-70bc35f925f6 {"id":"5ef9bf7f-c007-498a-9af4-70bc35f925f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130779} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27ac54d9 5ef9bf7f-c007-498a-9af4-70bc35f925f6 /tidb/cdc/default/default/upstream/7362135555638490979 {"id":7362135555638490979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5ef9bf7f-c007-498a-9af4-70bc35f925f6 {"id":"5ef9bf7f-c007-498a-9af4-70bc35f925f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130779} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27ac54d9 5ef9bf7f-c007-498a-9af4-70bc35f925f6 /tidb/cdc/default/default/upstream/7362135555638490979 {"id":7362135555638490979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11684.out cli changefeed create --start-ts=449349098549018625 
'--sink-uri=kafka://127.0.0.1:9092/ticdc-partition-table-test-26287?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 0a0d15ed-dd61-4221-9f7d-9c451ec52a1a Info: {"upstream_id":7362135555638490979,"namespace":"default","id":"0a0d15ed-dd61-4221-9f7d-9c451ec52a1a","sink_uri":"kafka://127.0.0.1:9092/ticdc-partition-table-test-26287?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:26:22.604124027+08:00","start_ts":449349098549018625,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349098549018625,"checkpoint_ts":449349098549018625,"checkpoint_time":"2024-04-26 19:26:17.546"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table test.finish_mark not exists for 5-th check, retry later table foreign_key.finish_mark not exists for 8-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_tls_with_auth/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:26:22 CST 2024] <<<<<< run test case cli_tls_with_auth success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a0238c0009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:10932, start at 2024-04-26 19:26:22.443473057 +0800 CST m=+8.332428728 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:22.449 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:22.435 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:22.435 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a0238c0009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:10932, start at 2024-04-26 19:26:22.443473057 +0800 CST m=+8.332428728 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:22.449 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:22.435 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:22.435 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c689ff30c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:11018, start at 2024-04-26 19:26:19.367131677 +0800 CST m=+5.203744678 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:19.376 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:19.381 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:19.381 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
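The two VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tidb_server_version, the tikv_gc_* rows and so on) correspond to TiDB's mysql.tidb bookkeeping table, printed while the harness confirms that the upstream and downstream TiDB instances have bootstrapped. A minimal sketch of reading the same rows directly, assuming a mysql client is available and TiDB is listening on 127.0.0.1:4000 (the port is an assumption, not taken from this log):

# sketch: dump TiDB's bootstrap/GC bookkeeping, the same rows shown above
mysql -h 127.0.0.1 -P 4000 -u root -e \
  'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'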
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x [Fri Apr 26 19:26:24 CST 2024] <<<<<< START kafka consumer in partition_table case >>>>>> table foreign_key.finish_mark exists check diff successfully table test.finish_mark exists check table exists success check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 1-th time... + cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12365.out cli tso query --pd=http://127.0.0.1:2379 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:26:26 CST 2024] <<<<<< run test case foreign_key success! >>>>>> cdc.test: no process found wait process cdc.test exit for 4-th time... 
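The start-ts values passed to `cli changefeed create` and returned by `cli tso query` in this log are TiDB TSOs: 64-bit values whose upper bits carry a physical timestamp in milliseconds and whose low 18 bits carry a logical counter. A small decoding sketch (plain bash, not part of the test scripts), using the start_ts of the partition_table changefeed created above; the result matches the checkpoint_time 2024-04-26 19:26:17.546 reported in its Info JSON:

# sketch: decode a TiDB TSO into wall-clock time
ts=449349098549018625                         # start_ts from the changefeed created above
physical_ms=$(( ts >> 18 ))                   # milliseconds since the Unix epoch
logical=$(( ts & ((1 << 18) - 1) ))           # per-millisecond logical counter
TZ='Asia/Shanghai' date -d "@$(( physical_ms / 1000 ))" '+%F %T'   # 2024-04-26 19:26:17
echo "physical_ms=$physical_ms logical=$logical"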
process cdc.test already exit [Fri Apr 26 19:26:27 CST 2024] <<<<<< run test case charset_gbk success! >>>>>> + set +x + tso='449349100774359042 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349100774359042 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449349100774359042 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test [Fri Apr 26 19:26:27 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1240512407.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:26:18.212 +08:00] [INFO] [main.go:99] ["running ddl test: 1 modifyColumnDefaultValueDDL2"] [2024/04/26 19:26:18.212 +08:00] [INFO] [main.go:99] ["running ddl test: 0 modifyColumnDefaultValueDDL1"] [2024/04/26 19:26:18.302 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs17b305be_3260_4063_81be_ab08e3023152"] [2024/04/26 19:26:18.321 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs81c94dff_5338_4f70_9190_c9701240d6cf"] [2024/04/26 19:26:18.398 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs87fec494_01ce_46c6_b78d_e24a5b6cefe0"] [2024/04/26 19:26:18.448 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs6d1f5c70_382a_4a78_bc01_b937e6bacbcf"] [2024/04/26 19:26:18.471 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs8bb3d773_f10d_4ba1_8026_b120eb324c98"] [2024/04/26 19:26:18.528 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs238b539c_ae14_4b42_b9d9_f110a9978a0b"] [2024/04/26 19:26:18.639 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:18.643 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:18.664 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:18.666 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:18.739 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs3fec00bb_2e94_425a_a42b_4d2544a7eec5"] [2024/04/26 19:26:18.741 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc30a0cd0_c86d_4b4e_9101_c54993f8dfff"] [2024/04/26 19:26:18.785 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:18.835 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:18.877 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:18.879 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:18.948 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:18.954 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:18.960 +08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/04/26 19:26:18.964 
+08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/04/26 19:26:19.032 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:19.032 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:19.145 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.150 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.156 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/04/26 19:26:19.161 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/04/26 19:26:19.168 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.235 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.258 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:19.265 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:19.265 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:19.270 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:19.344 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.358 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.448 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.462 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.467 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.535 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/04/26 19:26:19.540 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.541 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/04/26 19:26:19.565 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.574 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.672 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:19.731 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:19.748 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:19.768 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:19.846 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.852 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:19.853 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.861 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:19.940 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.032 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.034 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/04/26 19:26:20.035 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/04/26 19:26:20.040 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/04/26 19:26:20.041 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/04/26 19:26:20.052 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.129 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.147 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.151 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/04/26 19:26:20.157 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/04/26 19:26:20.240 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.255 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.336 
+08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.345 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.365 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.447 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.456 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.457 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:20.551 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.552 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.566 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.575 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:20.653 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.742 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.750 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.762 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] [2024/04/26 19:26:20.766 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] [2024/04/26 19:26:20.843 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.849 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:20.942 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/04/26 19:26:20.946 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/04/26 19:26:20.953 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.965 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:20.966 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.036 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.068 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.076 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:21.137 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:21.154 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.230 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:21.230 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:21.259 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.348 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.371 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.460 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.460 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.469 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/04/26 19:26:21.474 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/04/26 19:26:21.551 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.649 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.651 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.670 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:21.741 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:21.753 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:21.770 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:21.773 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 
19:26:21.831 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:21.842 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/04/26 19:26:21.843 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/04/26 19:26:21.846 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/04/26 19:26:21.848 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/04/26 19:26:21.853 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.854 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:21.941 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:21.952 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:22.032 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.069 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:22.072 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.079 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/04/26 19:26:22.135 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/04/26 19:26:22.155 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:22.237 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.239 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.271 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.341 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.346 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.435 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.436 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:22.437 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:22.455 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.459 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:22.542 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.547 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.641 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.648 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/04/26 19:26:22.653 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/04/26 19:26:22.672 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.674 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/04/26 19:26:22.730 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.731 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/04/26 19:26:22.748 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:22.839 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.846 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:22.869 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:22.934 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:22.945 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.032 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:23.032 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:23.032 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.057 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 
19:26:23.062 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:23.140 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.151 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.248 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.329 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.334 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/04/26 19:26:23.340 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/04/26 19:26:23.345 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.356 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.441 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.457 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.462 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/04/26 19:26:23.464 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/04/26 19:26:23.466 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/04/26 19:26:23.468 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/04/26 19:26:23.476 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:23.540 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:23.540 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:23.633 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:23.634 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.639 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:23.649 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.662 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:23.759 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:23.763 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:23.851 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:23.938 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:23.951 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/04/26 19:26:23.958 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/04/26 19:26:23.967 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:24.036 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:24.057 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:24.138 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:24.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.234 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.241 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.270 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.272 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:24.274 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:24.329 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:24.345 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:24.358 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] [2024/04/26 19:26:24.365 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] [2024/04/26 19:26:24.435 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] 
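The interleaved "N insert success: M" / "N delete success: M" lines filling this part of the log come from the ddl test workload started above ("running ddl test: ..."); each line appears to report the running row count for one writer. If this console output is saved to a file, a throwaway pipeline can summarize how far each writer got — workload.log below is a hypothetical capture, not a file this job writes:

# sketch: highest "insert success" count seen per writer id in a captured log
grep -oE '"[0-9]+ insert success: [0-9]+"' workload.log | tr -d '"' |
  awk '{ if ($4 + 0 > max[$1]) max[$1] = $4 + 0 } END { for (w in max) printf "writer %s reached %d rows\n", w, max[w] }'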
[2024/04/26 19:26:24.448 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.534 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.559 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/04/26 19:26:24.563 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.630 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/04/26 19:26:24.651 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.653 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.732 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.770 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:24.844 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:24.854 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:24.856 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:24.993 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:24.997 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:25.007 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.018 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:25.028 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:25.059 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.063 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.142 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.169 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.228 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/04/26 19:26:25.244 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/04/26 19:26:25.253 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/04/26 19:26:25.257 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/04/26 19:26:25.259 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/04/26 19:26:25.263 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/04/26 19:26:25.269 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.273 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.336 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.378 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:25.433 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.434 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:25.436 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:25.568 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.569 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.629 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:25.629 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:25.632 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:25.666 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:25.735 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:25.764 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:25.842 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] [2024/04/26 19:26:25.844 +08:00] [INFO] 
[main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:25.859 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] [2024/04/26 19:26:25.949 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:25.951 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:25.952 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:26.051 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.064 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.069 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.136 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:26.152 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] [2024/04/26 19:26:26.158 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] [2024/04/26 19:26:26.241 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:26.244 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.249 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:26.249 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:26.251 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:26.338 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.360 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.447 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.450 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] [2024/04/26 19:26:26.452 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.463 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] [2024/04/26 19:26:26.555 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.558 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.640 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.653 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:26.672 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:26.736 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:26.764 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.853 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.858 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:26.861 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:26.863 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.866 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:26.955 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:26.971 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/04/26 19:26:27.028 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/04/26 19:26:27.032 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/04/26 19:26:27.036 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/04/26 19:26:27.042 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:27.059 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.066 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] [2024/04/26 19:26:27.066 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.129 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] 
[2024/04/26 19:26:27.162 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.172 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:27.265 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:27.269 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:27.340 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:27.350 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:27.445 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.469 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.471 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:27.530 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:27.531 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:27.536 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:27.565 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:27.738 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:27.741 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:27.748 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"] [2024/04/26 19:26:27.761 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"] [2024/04/26 19:26:27.766 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:27.854 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:27.856 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:27.928 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/04/26 19:26:27.938 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/04/26 19:26:27.956 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:27.963 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:27.972 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:28.029 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.147 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:28.154 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:28.155 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.162 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:28.163 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:28.164 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:28.239 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.344 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.356 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"] [2024/04/26 19:26:28.358 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.433 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"] [2024/04/26 19:26:28.444 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.455 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.466 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.561 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:28.640 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:28.648 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:28.728 +08:00] 
[INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.758 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"] [2024/04/26 19:26:28.762 +08:00] [INFO] [main.go:199] ["0 delete success: 600"] [2024/04/26 19:26:28.766 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.767 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"] [2024/04/26 19:26:28.828 +08:00] [INFO] [main.go:199] ["0 delete success: 600"] [2024/04/26 19:26:28.829 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:28.834 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.835 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:28.837 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.841 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:28.861 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:28.952 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:28.960 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"] [2024/04/26 19:26:29.033 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:29.051 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"] [2024/04/26 19:26:29.063 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:29.064 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:29.135 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:29.170 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.256 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.264 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.362 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
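"Verifying upstream PD is started..." above (and the downstream check that follows) is the harness waiting for the placement driver to come up on its client port, 2379 in these tests. PD also exposes an HTTP API on that port which can be probed by hand; the endpoint paths below are the long-standing v1 API paths, but treat them as assumptions rather than calls this job actually makes:

# sketch: probe PD's HTTP API on the client port used in this log
curl -sf http://127.0.0.1:2379/pd/api/v1/version            # PD build version as JSON
curl -sf http://127.0.0.1:2379/pd/api/v1/members | jq -r '.members[].name'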
[2024/04/26 19:26:29.429 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:29.452 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:29.461 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:29.466 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:29.474 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:29.480 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.561 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.569 +08:00] [INFO] [main.go:178] ["1 insert success: 1800"] [2024/04/26 19:26:29.569 +08:00] [INFO] [main.go:178] ["0 insert success: 1300"] [2024/04/26 19:26:29.587 +08:00] [INFO] [main.go:178] ["0 insert success: 1300"] [2024/04/26 19:26:29.633 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.635 +08:00] [INFO] [main.go:178] ["1 insert success: 1800"] [2024/04/26 19:26:29.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.659 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.662 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.870 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.871 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.955 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.958 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:29.976 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:29.980 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
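The `(( i++ ))` / `curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info` trace that resumes just below is the harness's readiness loop for a freshly started cdc server: it polls the debug endpoint up to 50 times, three seconds apart, until the response contains "etcd info". Stripped of the set -x noise it amounts to roughly the following (a paraphrase of the traced behaviour, not the literal helper from the repo):

# sketch: wait for a cdc server to report its etcd info on the status port
for i in $(seq 0 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info 2>&1)
  if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
    echo "cdc server is ready"
    break
  fi
  if [ "$i" -eq 50 ]; then echo "cdc server never became ready" >&2; exit 1; fi
  sleep 3
done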
[2024/04/26 19:26:30.155 +08:00] [INFO] [main.go:178] ["1 insert success: 1900"] [2024/04/26 19:26:30.179 +08:00] [INFO] [main.go:178] ["1 insert success: 1900"] [2024/04/26 19:26:30.186 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:30.245 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:30.288 +08:00] [INFO] [main.go:178] ["0 insert success: 1400"] [2024/04/26 19:26:30.292 +08:00] [INFO] [main.go:199] ["0 delete success: 700"] [2024/04/26 19:26:30.332 +08:00] [INFO] [main.go:178] ["0 insert success: 1400"] [2024/04/26 19:26:30.336 +08:00] [INFO] [main.go:199] ["0 delete success: 700"] [2024/04/26 19:26:30.352 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:30.371 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:30.375 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:30.376 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:30.466 +08:00] [INFO] [main.go:178] ["1 insert success: 2000"] [2024/04/26 19:26:30.497 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:30.506 +08:00] [INFO] [main.go:178] ["1 insert success: 2000"] [2024/04/26 19:26:30.541 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:30.710 +08:00] [INFO] [main.go:178] ["0 insert success: 1500"] [2024/04/26 19:26:30.753 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs444aade2_7e8f_44d5_b093_7eec681cc1e5"] [2024/04/26 19:26:30.760 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs13a20eaf_acee_48e4_b34f_2e62cdc9dc64"] [2024/04/26 19:26:30.761 +08:00] [INFO] [main.go:178] ["1 insert success: 2100"] [2024/04/26 19:26:30.767 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsf30c7481_60c8_42a8_bad0_d9d77012f9f9"] [2024/04/26 19:26:30.776 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc6fbc030_34d4_44a4_8906_d30e3beba63b"] [2024/04/26 19:26:30.785 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs2383c054_429c_4e16_8fc2_6d80c12a0d6a"] [2024/04/26 19:26:30.838 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs6e282372_5706_41ee_aa4f_277185d90d04"] [2024/04/26 19:26:30.862 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs9d700d4b_1fbd_492d_927b_a6925fce5c42"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:26:30 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c056a777-a674-4448-a5d5-780c1008621c {"id":"c056a777-a674-4448-a5d5-780c1008621c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130787} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27d260d8 c056a777-a674-4448-a5d5-780c1008621c /tidb/cdc/default/default/upstream/7362135591121479750 {"id":7362135591121479750,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c056a777-a674-4448-a5d5-780c1008621c {"id":"c056a777-a674-4448-a5d5-780c1008621c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130787} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27d260d8 c056a777-a674-4448-a5d5-780c1008621c /tidb/cdc/default/default/upstream/7362135591121479750 {"id":7362135591121479750,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c056a777-a674-4448-a5d5-780c1008621c {"id":"c056a777-a674-4448-a5d5-780c1008621c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130787} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a27d260d8 c056a777-a674-4448-a5d5-780c1008621c /tidb/cdc/default/default/upstream/7362135591121479750 {"id":7362135591121479750,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed-redo.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449349100774359042 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12463.out cli changefeed create --start-ts=449349100774359042 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml Create changefeed successfully! ID: test-1 Info: {"upstream_id":7362135591121479750,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-04-26T19:26:31.035629129+08:00","start_ts":449349100774359042,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349100774359042,"checkpoint_ts":449349100774359042,"checkpoint_time":"2024-04-26 19:26:26.035"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
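Each "Create changefeed successfully!" block above ends with a one-line Info JSON describing the changefeed that was just created (sink URI, consistent/redo settings, start and checkpoint tsos). When picking a run apart it is handy to extract a few of those fields; the sketch assumes the JSON has been copied into changefeed_info.json, a hypothetical file, since the CLI only prints it to stdout:

# sketch: pull the interesting fields out of a changefeed Info JSON like the ones above
jq '{id, state, sink: .sink_uri, start_ts, checkpoint_time, redo_level: .config.consistent.level}' changefeed_info.json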
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:31.154 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs5171ccf9_f1af_4e2d_a36a_9edba9643acc"] [2024/04/26 19:26:31.237 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.238 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.241 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.249 +08:00] [INFO] [main.go:178] ["1 insert success: 2200"] [2024/04/26 19:26:31.252 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.253 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.258 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.264 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.268 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.276 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.289 +08:00] [INFO] [main.go:178] ["0 insert success: 1600"] [2024/04/26 19:26:31.329 +08:00] [INFO] [main.go:199] ["0 delete success: 800"] [2024/04/26 19:26:31.331 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.340 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.343 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.362 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.366 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.569 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:31.577 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:31.663 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.667 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.668 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.729 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.734 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.736 +08:00] [INFO] [main.go:178] ["1 insert success: 2300"] [2024/04/26 19:26:31.738 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.743 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.751 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.760 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.837 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.848 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.850 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.871 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:31.872 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:31.970 +08:00] [INFO] [main.go:178] ["0 insert success: 1700"] [2024/04/26 19:26:32.069 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:32.129 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 
19:26:32.159 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.167 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.168 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] table partition_table.t exists table partition_table.t1 exists table partition_table.t2 not exists for 1-th check, retry later [2024/04/26 19:26:32.228 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.230 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.235 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.238 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.240 +08:00] [INFO] [main.go:178] ["1 insert success: 2400"] [2024/04/26 19:26:32.250 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.260 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.285 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.296 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.328 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.344 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.348 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] + set +x ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 2824 0 --:--:-- --:--:-- --:--:-- 2797 100 221 100 221 0 0 2822 0 --:--:-- --:--:-- --:--:-- 2797 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-04-26 19:26:26.035","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-04-26 19:26:32.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:26:26.035","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:26:32.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:26:26.035","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:26:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .sink_checkpoint_ts [2024/04/26 19:26:32.481 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:32.540 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:32.572 +08:00] [INFO] [main.go:178] ["0 insert success: 1800"] [2024/04/26 19:26:32.572 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.576 +08:00] [INFO] [main.go:199] ["0 delete success: 900"] [2024/04/26 19:26:32.628 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.631 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.665 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.673 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.675 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.678 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.685 +08:00] [INFO] [main.go:178] ["1 insert success: 2500"] start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume Starting Upstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... + sink_checkpoint_ts='2024-04-26 19:26:26.035' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:26:26.035","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:26:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .puller_resolved_ts + puller_resolved_ts='1970-01-01 08:00:00.000' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:26:26.035","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:26:32.000","info":"Data' syncing is 'finished"}' ++ jq -r .last_synced_ts + last_synced_ts='1970-01-01 08:00:00.000' + '[' true '!=' true ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' ++ date '+%Y-%m-%d %H:%M:%S' + current='2024-04-26 19:26:32' + echo 'sink_checkpoint_ts is 2024-04-26' 19:26:26.035 sink_checkpoint_ts is 2024-04-26 19:26:26.035 ++ date -d '2024-04-26 19:26:26.035' +%s + checkpoint_timestamp=1714130786 ++ date -d '2024-04-26 19:26:32' +%s + current_timestamp=1714130792 + '[' 6 -gt 300 ']' + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' [2024/04/26 19:26:32.737 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.746 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.757 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.761 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.776 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.779 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.796 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:32.941 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:32.948 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
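The shell trace above is the synced-status check from the synced_status_with_redo case: it queries TiCDC's /api/v2/changefeeds/test-1/synced endpoint, pulls fields out with jq, and compares the sink checkpoint against the current wall clock. A condensed sketch of that logic, built only from the commands visible in the trace (the 300-second bound mirrors the "-gt 300" test):

  #!/bin/bash
  # Condensed sketch of the synced-status check traced above; endpoint, jq
  # field names, and the 300s lag bound are taken from the logged commands.
  synced_status=$(curl -sX GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
  status=$(echo "$synced_status" | jq -r .synced)
  sink_checkpoint_ts=$(echo "$synced_status" | jq -r .sink_checkpoint_ts)
  checkpoint_timestamp=$(date -d "$sink_checkpoint_ts" +%s)
  current_timestamp=$(date '+%s')
  lag=$((current_timestamp - checkpoint_timestamp))
  if [ "$status" = "true" ] && [ "$lag" -le 300 ]; then
    echo "changefeed test-1 is synced, checkpoint lag ${lag}s"
  else
    echo "changefeed test-1 not synced yet (lag ${lag}s)"; exit 1
  fi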
[2024/04/26 19:26:33.033 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.050 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.055 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.133 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.137 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.139 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.139 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.182 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.190 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.229 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later [2024/04/26 19:26:33.238 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.251 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.253 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.261 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.345 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:33.359 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:33.428 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.444 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.460 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.541 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.546 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.550 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.550 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.587 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.638 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.646 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.656 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.674 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.678 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.685 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] Starting Upstream TiDB... [2024/04/26 19:26:33.777 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:33.833 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:33.864 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:33.875 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:33.948 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.t2 not exists for 2-th check, retry later [2024/04/26 19:26:34.032 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.033 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.049 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.054 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.084 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.135 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.139 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.148 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.165 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.182 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.184 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.254 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:34.266 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:34.293 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.339 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.371 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.441 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.445 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.468 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.473 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.552 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.553 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.562 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.566 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.580 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.638 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.642 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.670 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:34.686 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:34.738 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:34.751 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:34.784 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:34.856 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:34.858 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:34.882 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:34.929 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:34.970 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:34.975 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:34.979 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:35.030 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:35.037 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:35.068 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:35.072 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:35.085 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:35.143 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:35.250 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.347 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.361 +08:00] [INFO] [main.go:88] ["testGetDefaultValue take 17.148621262s"] [2024/04/26 19:26:35.387 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.449 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.451 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.479 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.529 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.568 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.573 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.576 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.643 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.652 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:35.655 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.658 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.659 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:35.754 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:35.775 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t1 exists + sleep 5 [2024/04/26 19:26:35.856 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:35.943 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:35.945 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:35.955 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] 
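check_table_exists, invoked a few lines above, is one of the helpers on the test PATH (tests/integration_tests/_utils); judging from its output ("table test.t1 not exists for 1-th check, retry later" ... "table test.t1 exists"), it polls the downstream database until the table appears. A rough stand-in under that assumption:

  #!/bin/bash
  # Rough stand-in for check_table_exists, assuming it polls the given
  # MySQL/TiDB endpoint until the table is visible; the retry count and
  # sleep interval are illustrative, not taken from the real helper.
  check_table_exists() {
    local table=$1 host=$2 port=$3
    for i in $(seq 1 60); do
      if mysql -h "$host" -P "$port" -u root -e "DESC $table" >/dev/null 2>&1; then
        echo "table $table exists"; return 0
      fi
      echo "table $table not exists for ${i}-th check, retry later"; sleep 2
    done
    return 1
  }
  check_table_exists test.t1 127.0.0.1 3306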
[2024/04/26 19:26:35.967 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] table partition_table.t2 not exists for 3-th check, retry later [2024/04/26 19:26:36.057 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:36.060 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:36.064 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:36.085 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:36.130 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:36.130 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:36.132 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:36.136 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:36.137 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:36.198 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.265 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.355 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.357 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.358 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.369 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.462 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.535 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.544 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.546 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.560 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.560 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.564 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:36.587 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.595 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:36.672 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:36.673 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.677 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:36.761 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.774 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.781 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:36.783 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 13.76 secs (270030268 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-cdv81-dvjml --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: 
jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "e1b5b2cf6ffebdfa819d22f25145e2ce5018a05f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-cdv81" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: 
"workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Timeout set to expire in 6 min 0 sec [Pipeline] { [2024/04/26 19:26:36.867 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.948 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.958 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:36.967 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:36.970 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.973 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:36.975 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:37.045 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [Pipeline] sh [2024/04/26 19:26:37.059 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:37.137 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.138 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.143 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.229 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.239 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.258 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.261 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [Pipeline] sh Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:37.339 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.382 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.382 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.430 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.442 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.442 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.447 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:37.471 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.481 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:37.551 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:37.552 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:37.552 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh [2024/04/26 19:26:37.641 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:37.647 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:37.677 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:37.730 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:37.758 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [2024/04/26 19:26:38.029 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:38.036 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [2024/04/26 19:26:38.132 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:38.143 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:38.147 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:38.149 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:38.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:38.242 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.t2 not exists for 4-th check, retry later [2024/04/26 19:26:38.336 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.337 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.342 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.457 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.458 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.628 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.631 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.645 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.733 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.736 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.778 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.787 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.830 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.831 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/04/26 19:26:38.849 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.859 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:26:38.878 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:38.883 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:38.892 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:38.950 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:38.959 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:39.033 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:39.034 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:39.050 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
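The repeated "+ echo Waiting for ..." traces above are the Kafka readiness probe run in each agent pod: nc port checks for ZooKeeper (2181) and Kafka (9092), then ZooKeeper's four-letter dump command to confirm that broker id 1 has registered under /brokers/ids. Reassembled as a single wait loop (the loop structure is an assumption; the individual commands are verbatim from the trace):

  #!/bin/bash
  # Kafka/ZooKeeper readiness wait reassembled from the trace above.
  echo "Waiting for zookeeper to be ready..."
  until nc -z localhost 2181; do sleep 1; done
  echo "Waiting for kafka to be ready..."
  until nc -z localhost 9092; do sleep 1; done
  echo "Waiting for kafka-broker to be ready..."
  until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
    sleep 1
  done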
[2024/04/26 19:26:39.086 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:39.140 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:39.172 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:39.257 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:39.259 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:39.268 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/04/26 19:26:39.272 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/04/26 19:26:39.277 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:39.338 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.343 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.356 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.385 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.430 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.462 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.468 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.530 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.561 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.582 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.645 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.736 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.743 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.744 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.751 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/04/26 19:26:39.752 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/04/26 19:26:39.773 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:39.941 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:39.948 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:40.037 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.051 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.073 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a128900018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:1815, start at 2024-04-26 19:26:39.179068804 +0800 CST m=+5.397677418 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:39.188 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:39.190 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:39.190 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:40.154 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.174 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:40.246 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.283 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:40.284 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.289 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:40.329 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/04/26 19:26:40.333 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/04/26 19:26:40.344 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] table partition_table.t2 exists table partition_table.finish_mark not exists for 1-th check, retry later [2024/04/26 19:26:40.394 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.402 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.439 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.454 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.546 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.563 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.637 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.674 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.772 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.772 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.848 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.855 +08:00] [INFO] [main.go:178] ["73 insert success: 2000"] [2024/04/26 19:26:40.855 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/04/26 19:26:40.931 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:40.942 +08:00] [INFO] [main.go:178] ["72 insert success: 2100"] [2024/04/26 19:26:40.958 +08:00] [INFO] [main.go:178] ["72 insert success: 2100"] [2024/04/26 19:26:41.006 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:41.055 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:41.076 +08:00] [INFO] [main.go:178] ["72 insert success: 2100"] ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 3009 0 --:--:-- --:--:-- --:--:-- 3037 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-04-26 19:26:38.435","puller_resolved_ts":"2024-04-26 19:26:32.685","last_synced_ts":"2024-04-26 19:26:33.235","now_ts":"2024-04-26 19:26:40.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:26:38.435","puller_resolved_ts":"2024-04-26' '19:26:32.685","last_synced_ts":"2024-04-26' '19:26:33.235","now_ts":"2024-04-26' '19:26:40.000","info":"The' data syncing is 
not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:26:38.435","puller_resolved_ts":"2024-04-26' '19:26:32.685","last_synced_ts":"2024-04-26' '19:26:33.235","now_ts":"2024-04-26' '19:26:40.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:41.115 +08:00] [INFO] [main.go:178] ["72 insert success: 2100"] [2024/04/26 19:26:41.120 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:41.175 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:41.176 +08:00] [INFO] [main.go:178] ["72 insert success: 2100"] [2024/04/26 19:26:41.183 +08:00] [INFO] [main.go:178] ["73 insert success: 2100"] [2024/04/26 19:26:41.447 +08:00] [INFO] [main.go:178] ["73 insert success: 2200"] [2024/04/26 19:26:41.464 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc72778ea_58b3_42f8_a465_00d26e2af535"] [2024/04/26 19:26:41.475 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1e298018_6d82_43c8_bad7_a6717b033bad"] [2024/04/26 19:26:41.497 +08:00] [INFO] [main.go:178] ["72 insert success: 2200"] [2024/04/26 19:26:41.630 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs33b23bf7_3d1e_4dd5_a5e5_ed914c83698e"] [2024/04/26 19:26:41.654 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd2a40f8b_47f3_443b_80bc_6c9fcbf3d5fd"] [2024/04/26 19:26:41.749 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1bfac684_a434_4160_9bab_5ef8cc67e154"] [2024/04/26 19:26:41.834 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs9160617a_7078_4702_9330_26b79c517256"] [2024/04/26 19:26:41.835 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLscaa0b5a1_c515_40c3_8815_58d41ced3b6c"] [2024/04/26 19:26:41.935 +08:00] [INFO] [main.go:178] ["73 insert success: 2300"] [2024/04/26 19:26:41.960 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:41.964 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:41.969 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:42.032 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.053 +08:00] [INFO] [main.go:178] ["72 insert success: 2300"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a128900018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:1815, start at 2024-04-26 19:26:39.179068804 +0800 CST m=+5.397677418 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:39.188 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:39.190 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:39.190 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a135500014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:1891, start at 2024-04-26 19:26:39.986292749 +0800 CST m=+6.144181339 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:39.993 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:39.956 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:39.956 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_manager/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table partition_table.finish_mark not exists for 2-th check, retry later [2024/04/26 19:26:42.165 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:42.165 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.252 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:42.261 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.361 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.366 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [Pipeline] { [2024/04/26 19:26:42.530 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:42.532 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.540 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:42.541 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [Pipeline] { [2024/04/26 19:26:42.651 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:42.657 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:42.670 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:42.671 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:42.785 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:42.788 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:42.852 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:42.880 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:42.881 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [Pipeline] { [Pipeline] } [Pipeline] } [Pipeline] } [2024/04/26 19:26:42.945 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:43.028 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:43.030 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:43.055 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] 
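The VARIABLE_NAME / VARIABLE_VALUE blocks above and below are dumps of TiDB's bootstrap and GC metadata, printed while the scripts verify that each TiDB instance has started. The same view can be reproduced by hand from the mysql.tidb table (127.0.0.1:4000 is an assumed local upstream TiDB address, not taken from this log):

  #!/bin/bash
  # Query the GC metadata rows that the startup check dumps above.
  mysql -h 127.0.0.1 -P 4000 -u root \
    -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE "tikv_gc_%"'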
[2024/04/26 19:26:43.133 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a1562c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:4954, start at 2024-04-26 19:26:42.090315812 +0800 CST m=+5.193718805 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:42.098 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:42.059 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:42.059 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] checkout [2024/04/26 19:26:43.254 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.268 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.348 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:43.370 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] // container [2024/04/26 19:26:43.438 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.464 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.470 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:43.485 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.549 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.550 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:43.551 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:43.566 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:43.637 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:43.645 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [Pipeline] // container [Pipeline] // container [Pipeline] sh [2024/04/26 19:26:43.669 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs74d8dcc3_ac32_429c_8ddc_4558c89810ee"] [2024/04/26 19:26:43.669 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:43.763 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [Pipeline] sh [2024/04/26 19:26:43.937 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:43.966 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.041 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:44.137 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 
19:26:44.147 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.160 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] table partition_table.finish_mark not exists for 3-th check, retry later No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@c80428f; decorates RemoteLauncher[hudson.remoting.Channel@7e3547bc:JNLP4-connect connection from 10.233.126.234/10.233.126.234:55134] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=b057ba4a-4e23-41a0-a314-167a73f5cef4 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin 
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h32rl GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h32rl pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
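Each test agent resets its scratch directory and then hands a whole group of cases to run_group.sh, as the "+ rm -rf /tmp/tidb_cdc_test ... + ./tests/integration_tests/run_group.sh kafka G13" trace above shows; the "find: '/tmp/tidb_cdc_test/*/*': No such file or directory" line is just a probe of the freshly emptied directory. A minimal local reproduction of that entry point, assuming the working directory is the tiflow checkout; the single-case invocation at the end is hypothetical, inferred from the "Running test .../tiflash/run.sh using Sink-Type: kafka" banner:

  #!/bin/bash
  # Entry point for one integration-test group, condensed from the trace above.
  cd tiflow                                          # assumed checkout location
  rm -rf /tmp/tidb_cdc_test && mkdir -p /tmp/tidb_cdc_test
  chmod +x ./tests/integration_tests/run_group.sh
  ./tests/integration_tests/run_group.sh kafka G13   # group G13 runs: tiflash region_merge common_1
  # Hypothetical: run a single case directly instead of a whole group.
  # ./tests/integration_tests/tiflash/run.sh kafka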
[Pipeline] sh [2024/04/26 19:26:44.244 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:44.244 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:44.253 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.331 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.354 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.356 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:44.363 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:44.363 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@75a8fa18; decorates RemoteLauncher[hudson.remoting.Channel@4b06cb48:JNLP4-connect connection from 10.233.84.17/10.233.84.17:60292] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G11 Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=3cb38a8c-98aa-4f50-ac46-51ce7c0ebe7f BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z9nq8 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-z9nq8 pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... 
<<================= [Fri Apr 26 19:26:44 CST 2024] <<<<<< START cdc server in kafka_simple_basic case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.32283230.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:26:44.440 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.529 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.638 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:44.671 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G15 Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=29bc09b0-b034-4d56-850f-7833e9260995 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-llqb6 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj pingcap_tiflow_pull_cdc_integration_kafka_test_1735-llqb6 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
[2024/04/26 19:26:44.733 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.831 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.844 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.845 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:44.866 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:44.866 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c68a1562c0013 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:4954, start at 2024-04-26 19:26:42.090315812 +0800 CST m=+5.193718805 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:28:42.098 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:26:42.059 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:16:42.059 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c68a154740015 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:5035, start at 2024-04-26 19:26:41.972735983 +0800 CST m=+5.024000345 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:28:41.979 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:26:41.949 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:16:41.949 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
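The two VARIABLE_NAME dumps above are the upstream and downstream TiDB bootstrap/GC state printed while each cluster is being verified. As a rough illustration only (assuming the upstream TiDB listens on the default 127.0.0.1:4000, which this log does not state), the same rows can be read back from the mysql.tidb system table with the MySQL client:

    # Illustrative sketch; the host and port are assumptions, not taken from this log.
    # The columns are the ones shown above: VARIABLE_NAME, VARIABLE_VALUE, COMMENT.
    mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'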
[2024/04/26 19:26:44.935 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:44.953 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:44.969 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:44.972 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:45.030 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:45.033 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:45.045 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.134 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/04/26 19:26:45.328 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.371 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.430 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.538 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.539 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.545 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.562 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.564 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:45.575 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] 
[2024/04/26 19:26:45.638 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.644 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.648 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:45.650 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:45.657 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:45.671 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@621caf71; decorates RemoteLauncher[hudson.remoting.Channel@44037a05:JNLP4-connect connection from 10.233.69.221/10.233.69.221:39700] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 [2024/04/26 19:26:45.744 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] table partition_table.finish_mark not exists for 4-th check, retry later [2024/04/26 19:26:45.964 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.062 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.080 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:46.235 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:46.235 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.247 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:46.248 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:46.250 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:46.256 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.270 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.271 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.273 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:46.274 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:46.285 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:46.335 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:46.345 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:46.547 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:46.655 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [2024/04/26 19:26:46.739 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:46.951 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:46.957 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:46.958 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 
19:26:46.962 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [Pipeline] { Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] container [Pipeline] { Avoid second fetch [Pipeline] stage [Pipeline] { (Test) Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > [2024/04/26 19:26:46.966 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:47.038 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:47.050 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:47.050 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:47.054 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:47.058 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:47.069 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:47.075 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:47.130 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [Fri Apr 26 19:26:47 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.63736375.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:26:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/428f44a5-312d-4506-81de-af0940b44679 {"id":"428f44a5-312d-4506-81de-af0940b44679","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130804} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a282342cf 428f44a5-312d-4506-81de-af0940b44679 /tidb/cdc/default/default/upstream/7362135688117778371 {"id":7362135688117778371,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/428f44a5-312d-4506-81de-af0940b44679 {"id":"428f44a5-312d-4506-81de-af0940b44679","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130804} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a282342cf 428f44a5-312d-4506-81de-af0940b44679 /tidb/cdc/default/default/upstream/7362135688117778371 {"id":7362135688117778371,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/428f44a5-312d-4506-81de-af0940b44679 {"id":"428f44a5-312d-4506-81de-af0940b44679","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130804} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a282342cf 428f44a5-312d-4506-81de-af0940b44679 /tidb/cdc/default/default/upstream/7362135688117778371 {"id":7362135688117778371,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.3284.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-14233?protocol=simple' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/conf/changefeed.toml -c simple-basic [Pipeline] dir Running in 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache [2024/04/26 19:26:47.241 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.335 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.337 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] start tidb cluster in /tmp/tidb_cdc_test/tiflash Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... [2024/04/26 19:26:47.547 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.553 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:47.558 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:47.633 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:47.648 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] The 1 times to try to start tidb cluster... > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Create changefeed successfully! ID: simple-basic Info: {"upstream_id":7362135688117778371,"namespace":"default","id":"simple-basic","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-14233?protocol=simple","create_time":"2024-04-26T19:26:47.72928718+08:00","start_ts":449349106424872962,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":5,"send_bootstrap_in_msg_count":100,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349106424872962,"checkpoint_ts":449349106424872962,"checkpoint_time":"2024-04-26 19:26:47.590"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... 
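The repeated "+ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info" xtrace above, with its "Connection refused" retries and "sleep 3", is the readiness probe each case runs after launching cdc.test and before creating a changefeed. A minimal sketch of that pattern, using only what the trace shows (the function name and the error message are illustrative, not the actual helper from the test utilities):

    check_cdc_ready() {
        # Poll the CDC server's debug endpoint until its etcd registration appears,
        # retrying up to 50 times with a 3-second pause, as in the trace above.
        for i in $(seq 0 50); do
            res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info 2>&1)
            # "etcd info" shows up once the capture has registered itself in etcd.
            echo "$res" | grep -q 'etcd info' && return 0
            # "failed to get info:" means the server answered but reported an error.
            echo "$res" | grep -q 'failed to get info:' && echo "cdc reported an error" >&2
            [ "$i" -eq 50 ] && break
            sleep 3
        done
        return 1
    }

Only once this probe succeeds does the case issue "cdc cli changefeed create ..." as seen above for kafka_simple_basic.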
[2024/04/26 19:26:47.728 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.737 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.737 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:47.738 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.742 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:47.756 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:47.761 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:47.773 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:47.877 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:47.965 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:47.966 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation Starting Upstream PD... [2024/04/26 19:26:48.136 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:48.144 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:48.150 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:48.233 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:48.237 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:48.437 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:48.443 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:48.447 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:48.458 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:48.462 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:48.464 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] table partition_table.finish_mark not exists for 5-th check, retry later [2024/04/26 19:26:48.540 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:48.543 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.644 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:48.669 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.679 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] start tidb cluster in /tmp/tidb_cdc_test/ddl_manager Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... [2024/04/26 19:26:48.758 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.829 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.831 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:48.838 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.844 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:48.962 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) [2024/04/26 19:26:49.034 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:49.039 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:49.045 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.054 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:49.062 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:49.151 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.158 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] + set +x > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Commit message: "feat(tidb): test flashbacktest package (#2942)" [2024/04/26 19:26:49.359 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.368 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.454 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.473 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.473 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.539 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.542 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:49.593 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:49.645 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.646 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.654 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.657 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:49.661 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:49.695 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:49.848 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:49.848 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:49.956 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:50.063 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:50.137 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:50.154 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:50.156 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] Starting Upstream TiDB... table partition_table.finish_mark not exists for 6-th check, retry later [2024/04/26 19:26:50.272 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:50.335 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:50.340 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:50.348 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:50.361 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:50.367 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:26:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/738f2938-c167-4238-a24f-93977bd89f78 {"id":"738f2938-c167-4238-a24f-93977bd89f78","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130807} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a283163cc 738f2938-c167-4238-a24f-93977bd89f78 /tidb/cdc/default/default/upstream/7362135697680945787 {"id":7362135697680945787,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/738f2938-c167-4238-a24f-93977bd89f78 {"id":"738f2938-c167-4238-a24f-93977bd89f78","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130807} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a283163cc 738f2938-c167-4238-a24f-93977bd89f78 /tidb/cdc/default/default/upstream/7362135697680945787 {"id":7362135697680945787,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/738f2938-c167-4238-a24f-93977bd89f78 {"id":"738f2938-c167-4238-a24f-93977bd89f78","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130807} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a283163cc 738f2938-c167-4238-a24f-93977bd89f78 /tidb/cdc/default/default/upstream/7362135697680945787 {"id":7362135697680945787,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:26:50 CST 2024] <<<<<< START kafka consumer in changefeed_pause_resume case >>>>>> Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:50.532 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:50.667 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:50.668 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:50.669 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:50.779 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:50.842 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:50.846 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:50.850 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:50.870 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:50.879 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:50.998 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] table changefeed_pause_resume.t1 not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/resolve_lock Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... [2024/04/26 19:26:51.094 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:26:51.153 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:51.216 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:51.223 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:26:51.241 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:51.439 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc9c2ed57_4f26_4487_88e0_54d74e0da51f"] [2024/04/26 19:26:51.458 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc1b2a3ac_b364_4fa0_8bdc_a8777b39c8a7"] [2024/04/26 19:26:51.474 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:51.495 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs82f41f86_d855_4f50_85a1_faee04b4cfff"] [2024/04/26 19:26:51.526 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs623c26a4_8447_441b_97b9_727194b44a6d"] [2024/04/26 19:26:51.536 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:51.742 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs8a3887da_5cf3_4916_84ca_792db1521885"] [2024/04/26 19:26:51.843 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs886af9e5_4713_4d5d_961f_4216c5687ec6"] [2024/04/26 19:26:51.858 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs76f99fb5_0257_4b43_8385_a23a896dd825"] [2024/04/26 19:26:51.947 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:51.956 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:51.971 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.031 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:52.067 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.070 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:52.130 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:52.132 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.268 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] table partition_table.finish_mark not exists for 7-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:52.335 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.430 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:52.444 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:52.460 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.467 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:52.537 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:52.539 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.635 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:52.638 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.675 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:52.675 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.688 +08:00] [INFO] [main.go:178] ["72 insert 
success: 200"] [2024/04/26 19:26:52.731 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.792 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] table changefeed_pause_resume.t1 not exists for 2-th check, retry later [2024/04/26 19:26:52.831 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.870 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:52.882 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:52.930 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.943 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:52.955 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:52.959 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:53.069 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.134 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:53.168 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.231 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:53.263 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.269 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:53.347 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs455dd319_bf72_4a1d_a9e6_e5908271c887"] [2024/04/26 19:26:53.435 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:53.446 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.529 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.537 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:53.542 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:26:53.552 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:53.564 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:53.572 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] Starting Upstream TiDB... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/04/26 19:26:53.671 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:53.730 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:53.762 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:53.771 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:53.846 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:53.943 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:54.036 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:26:54.039 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:26:54.070 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] table partition_table.finish_mark exists check diff successfully [2024/04/26 19:26:54.161 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:54.169 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:54.235 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:54.238 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:54.241 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:54.257 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.330 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [Fri Apr 26 19:26:54 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic case >>>>>> [2024/04/26 19:26:54.371 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:54.371 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.452 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.455 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:54.528 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:54.546 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] wait process cdc.test exit for 1-th time... [2024/04/26 19:26:54.648 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:26:54.659 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:26:54.847 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.847 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:54.858 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.868 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:54.941 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:54.953 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:54.961 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.041 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:55.045 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.052 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.075 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.078 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] table test.finish_mark_for_ddl not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... [2024/04/26 19:26:55.129 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.149 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.231 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:26:55.244 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_pause_resume.t1 exists table changefeed_pause_resume.t2 exists table changefeed_pause_resume.t3 not exists for 1-th check, retry later [2024/04/26 19:26:55.362 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.429 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.435 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.460 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.540 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:55.548 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:55.662 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [2024/04/26 19:26:55.666 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [Fri Apr 26 19:26:55 CST 2024] <<<<<< run test case partition_table success! >>>>>> [2024/04/26 19:26:55.729 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:26:55.745 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:55.748 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:55.750 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:55.768 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:55.839 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:26:55.848 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:26:55.848 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:55.942 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:55.955 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:55.963 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:56.043 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:56.064 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:56.138 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:56.264 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:56.330 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:56.362 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:56.366 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:56.427 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:56.460 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:56.542 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:26:56.547 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:26:56.573 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:56.644 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:56.652 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a227200014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:4093, start at 2024-04-26 19:26:55.468113887 +0800 CST m=+5.094321540 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:55.474 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:55.432 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:55.432 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. [2024/04/26 19:26:56.661 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] system_tz Asia/Shanghai TiDB Global System Timezone. [2024/04/26 19:26:56.665 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. [2024/04/26 19:26:56.747 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] tikv_gc_leader_uuid 63c68a227200014 Current GC worker leader UUID. 
(DO NOT EDIT) [2024/04/26 19:26:56.753 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:56.769 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:56.863 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:4093, start at 2024-04-26 19:26:55.468113887 +0800 CST m=+5.094321540 Host name and pid of current GC leader. (DO NOT EDIT) [2024/04/26 19:26:56.863 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] tikv_gc_leader_lease 20240426-19:28:55.474 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:55.432 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:55.432 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a228880016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:4179, start at 2024-04-26 19:26:55.569545912 +0800 CST m=+5.142645605 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:55.576 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:55.573 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:55.573 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
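[editor's note] The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above (bootstrapped, tikv_gc_leader_uuid, tikv_gc_life_time, tikv_gc_safe_point, ...) are rows of the mysql.tidb system table, which the harness dumps once each upstream/downstream TiDB instance has bootstrapped. They can be inspected directly with a query along these lines (host and port are assumptions):

    # Dump GC bookkeeping rows from the upstream TiDB instance.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"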
Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark_for_ddl not exists for 2-th check, retry later [2024/04/26 19:26:56.960 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:56.964 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:56.965 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:57.057 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:57.150 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:26:57.155 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] table changefeed_pause_resume.t3 exists [2024/04/26 19:26:57.260 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:57.266 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:57.336 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:57.336 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:57.429 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:57.437 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.543 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:57.554 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:57.561 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.640 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.644 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:57.646 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.672 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:57.674 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:57.745 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:26:57.749 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:26:57.829 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.837 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:57.844 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] 
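[editor's note] Messages of the form "table <schema>.<table> not exists for N-th check, retry later" / "table ... exists" come from the harness polling the downstream cluster for a marker table before it proceeds. A rough reconstruction of that poll (the DESC probe, port, and retry count are assumptions about the helper, not quoted from it):

    # Wait until a marker table is visible on the downstream (illustrative sketch).
    table='test.finish_mark_for_ddl'
    for i in $(seq 1 60); do
        if mysql -h 127.0.0.1 -P 3306 -u root -e "DESC ${table};" >/dev/null 2>&1; then
            echo "table ${table} exists"
            break
        fi
        echo "table ${table} not exists for ${i}-th check, retry later"
        sleep 2
    done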
[2024/04/26 19:26:57.845 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:57.875 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:57.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:58.061 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:58.132 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:58.242 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:58.253 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:58.268 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:58.347 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:58.354 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:26:58.371 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:58.429 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:26:58.437 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:58.451 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:58.459 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:58.466 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:58.485 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:58.486 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:58.535 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:58.575 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:58.584 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5621.out cli tso query --pd=http://127.0.0.1:2379 [2024/04/26 19:26:58.729 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:58.740 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:58.765 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:58.862 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:58.932 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:26:59.043 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:59.051 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:26:59.059 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:26:59.133 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:59.136 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:26:59.156 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.156 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] check diff failed 1-th time, retry later [2024/04/26 19:26:59.237 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.240 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:59.241 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:59.277 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:59.329 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 
19:26:59.344 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.381 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:26:59.381 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.437 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] table test.finish_mark_for_ddl not exists for 3-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:26:59.538 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:59.551 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:26:59.562 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:26:59.652 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.660 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:26:59.731 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:59.753 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a257f00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:1352, start at 2024-04-26 19:26:58.600320824 +0800 CST m=+5.254418324 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.609 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.606 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.606 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 10.51 secs (353716573 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [2024/04/26 19:26:59.861 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:59.934 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:26:59.942 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:26:59.974 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:26:59.978 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:26:59.995 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63c68a25dd40003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:1348, start at 2024-04-26 19:26:58.934117952 +0800 CST m=+5.263574131 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.942 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.933 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.933 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:27:00.036 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:27:00.062 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:27:00.071 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:27:00.148 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:27:00.154 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:27:00.233 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:27:00.350 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:27:00.396 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:27:00.428 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:27:00.459 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:27:00.476 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/04/26 19:27:00.478 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:27:00.505 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] + set +x + tso='449349109424062465 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349109424062465 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [2024/04/26 19:27:00.530 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/04/26 19:27:00.648 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [Fri Apr 26 19:27:00 CST 2024] <<<<<< START cdc server in ddl_puller_lag case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.56735675.out server --log-file /tmp/tidb_cdc_test/ddl_puller_lag/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_puller_lag/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:27:00.835 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:27:00.867 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/04/26 19:27:00.899 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/04/26 19:27:00.923 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/04/26 19:27:00.949 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] check diff failed 2-th time, retry later [2024/04/26 19:27:01.162 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/04/26 19:27:01.238 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa54089c3_bbea_4f8b_8763_db63548c57d5"] [2024/04/26 19:27:01.246 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs5c232e02_0e07_4105_ab80_26277c6a45c0"] [2024/04/26 19:27:01.251 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/04/26 19:27:01.269 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs335135ff_c8ec_4dca_84a2_7cce637ba766"] table test.finish_mark_for_ddl not exists for 4-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:27:01.351 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs9c6a9e20_efdc_4b6b_98e6_4c92206e5a02"] [2024/04/26 19:27:01.447 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/04/26 19:27:01.561 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:27:01.570 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a257f00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:1352, start at 2024-04-26 19:26:58.600320824 +0800 CST m=+5.254418324 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.609 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.606 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.606 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a257e80014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:1449, start at 2024-04-26 19:26:58.593898048 +0800 CST m=+5.180794853 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.600 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.604 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.604 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/04/26 19:27:01.628 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:27:01.636 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:27:01.639 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs412a05b3_dcc3_4831_a02d_a998fd0ff836"] [2024/04/26 19:27:01.644 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:27:01.656 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:27:01.733 +08:00] [INFO] [main.go:178] ["73 
insert success: 100"] [2024/04/26 19:27:01.769 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:27:01.872 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:27:01.886 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:27:01.970 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:27:01.981 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:27:01.987 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/04/26 19:27:01.990 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:27:01.990 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/04/26 19:27:02.000 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:27:02.096 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a25dd40003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:1348, start at 2024-04-26 19:26:58.934117952 +0800 CST m=+5.263574131 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.942 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.933 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.933 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a25ddc0003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:1433, start at 2024-04-26 19:26:58.936050388 +0800 CST m=+5.213279912 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:28:58.945 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:26:58.935 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:16:58.935 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/04/26 19:27:02.153 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:27:02.262 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:27:02.275 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:27:02.385 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:27:02.437 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/04/26 19:27:02.443 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:27:02.444 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:27:02.450 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/04/26 19:27:02.454 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:27:02.542 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/04/26 19:27:02.562 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:27:02.617 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:27:02.638 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:27:02.774 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:27:02.778 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/04/26 19:27:02.785 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:27:02.832 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:27:02.841 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] 
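[editor's note] The "run_cdc_cli tso query --pd=http://127.0.0.1:2379" trace earlier, followed by awk -F ' ' '{print $1}', captures a start timestamp for the changefeed created afterwards: cdc.test appends a coverage summary to its output, so only the first whitespace-separated field is kept. A hedged sketch of that step (variable names are illustrative):

    # Ask PD for a TSO via the cdc CLI; echo collapses the output onto one line
    # and awk keeps only the leading TSO, dropping the "PASS coverage: ..." suffix.
    tso_output=$(cdc.test cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo ${tso_output} | awk -F ' ' '{print $1}')
    echo "using start-ts ${start_ts}"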
[2024/04/26 19:27:02.846 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] check diff failed 3-th time, retry later [Pipeline] sh [2024/04/26 19:27:02.926 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:27:02.929 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:27:02.947 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:27:02.958 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:27:03.078 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/04/26 19:27:03.085 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:27:03.085 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] table test.finish_mark_for_ddl not exists for 5-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a290980004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:3946, start at 2024-04-26 19:27:02.187879718 +0800 CST m=+7.190999639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:02.200 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:02.182 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:02.182 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a290980004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:3946, start at 2024-04-26 19:27:02.187879718 +0800 CST m=+7.190999639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:02.200 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:02.182 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:02.182 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a2923c0003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:3990, start at 2024-04-26 19:27:02.288467924 +0800 CST m=+7.201255619 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:02.305 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:02.287 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:02.287 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored [2024/04/26 19:27:03.178 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:27:03.185 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/04/26 19:27:03.190 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:27:03.278 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:27:03.278 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:27:03.292 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:27:03.299 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [Pipeline] withEnv [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] container [Pipeline] { [2024/04/26 19:27:03.383 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/04/26 19:27:03.391 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] Verifying Upstream TiFlash is started... 
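[editor's note] "check diff failed N-th time, retry later" means the upstream/downstream data comparison has not converged yet and is retried; the comparison is typically driven by sync_diff_inspector. A rough sketch of such a retry wrapper (the config path variable and retry budget are assumptions):

    # Re-run the data comparison until it passes or the retry budget is exhausted.
    for i in $(seq 1 30); do
        if sync_diff_inspector --config="${WORK_DIR}/diff_config.toml" >/dev/null 2>&1; then
            echo 'check diff successfully'
            break
        fi
        echo "check diff failed ${i}-th time, retry later"
        sleep 3
    done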
[2024/04/26 19:27:03.440 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] Logging trace to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log [2024/04/26 19:27:03.545 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] Logging errors to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log [2024/04/26 19:27:03.552 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/04/26 19:27:03.556 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a27c440011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:1419, start at 2024-04-26 19:27:00.899867121 +0800 CST m=+5.179615006 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:00.908 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:00.881 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:00.881 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. 
ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a27c440011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:1419, start at 2024-04-26 19:27:00.899867121 +0800 CST m=+5.179615006 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:00.908 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:00.881 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:00.881 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a27dc40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:1500, start at 2024-04-26 19:27:01.00701814 +0800 CST m=+5.236009502 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:01.014 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:00.977 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:00.977 +0800 All versions after safe point can be accessed. (DO NOT EDIT) [2024/04/26 19:27:03.640 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:27:03.649 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:27:03.653 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:27:03.654 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:27:03.709 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/04/26 19:27:03.719 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:27:03.741 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:27:03.836 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:27:03.846 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/04/26 19:27:03.850 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2843.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:03 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/50a6aeed-347a-4420-9e5e-ebfa8626e56c {"id":"50a6aeed-347a-4420-9e5e-ebfa8626e56c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130821} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a285e35e8 50a6aeed-347a-4420-9e5e-ebfa8626e56c /tidb/cdc/default/default/upstream/7362135760900856622 {"id":7362135760900856622,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/50a6aeed-347a-4420-9e5e-ebfa8626e56c {"id":"50a6aeed-347a-4420-9e5e-ebfa8626e56c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130821} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a285e35e8 50a6aeed-347a-4420-9e5e-ebfa8626e56c /tidb/cdc/default/default/upstream/7362135760900856622 {"id":7362135760900856622,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/50a6aeed-347a-4420-9e5e-ebfa8626e56c {"id":"50a6aeed-347a-4420-9e5e-ebfa8626e56c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130821} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a285e35e8 50a6aeed-347a-4420-9e5e-ebfa8626e56c /tidb/cdc/default/default/upstream/7362135760900856622 {"id":7362135760900856622,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5732.out cli changefeed create --start-ts=449349109424062465 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-21438?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760' [2024/04/26 19:27:03.938 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/04/26 19:27:03.953 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/04/26 19:27:03.954 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:27:03.958 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:27:04.013 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] 
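[editor's note] The curl trace above shows how the harness decides the cdc server is up: it polls http://127.0.0.1:8300/debug/info up to 50 times, treating a response that contains "etcd info" as success and one containing "failed to get info:" as an error. A condensed sketch of the same loop:

    # Poll the cdc server's debug endpoint until it reports etcd info.
    i=0
    while [ "$i" -le 50 ]; do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info 2>&1 || true)
        if echo "$res" | grep -q 'etcd info'; then
            break
        fi
        if [ "$i" -eq 50 ]; then
            echo 'cdc server failed to start'
            exit 1
        fi
        i=$((i + 1))
        sleep 3
    done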
[2024/04/26 19:27:04.024 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:27:04.042 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:27:04.130 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:27:04.141 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:27:04.142 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [Fri Apr 26 19:27:03 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.28402842.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! ID: 3ba076be-7f93-4e6f-adb2-df2e1c45992c Info: {"upstream_id":7362135760900856622,"namespace":"default","id":"3ba076be-7f93-4e6f-adb2-df2e1c45992c","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-21438?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=ddl_puller_lag\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:04.230837077+08:00","start_ts":449349109424062465,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349109424062465,"checkpoint_ts":449349109424062465,"checkpoint_time":"2024-04-26 19:26:59.031"} PASS coverage: 2.4% of statements in 
github.com/pingcap/tiflow/... [2024/04/26 19:27:04.226 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:27:04.240 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:27:04.244 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:27:04.245 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:27:04.302 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/04/26 19:27:04.313 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:27:04.332 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:27:04.425 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:27:04.437 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:27:04.438 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/04/26 19:27:04.537 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:27:04.550 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:27:04.552 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:27:04.558 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:27:04.614 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/04/26 19:27:04.629 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:27:04.645 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [Pipeline] } [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] // timeout [Pipeline] } [2024/04/26 19:27:04.726 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:27:04.737 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/04/26 19:27:04.739 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:27:04.824 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:27:04.835 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:27:04.846 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:27:04.853 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:27:04.897 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/04/26 19:27:04.931 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // container [Pipeline] sh [2024/04/26 19:27:05.018 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/04/26 19:27:05.110 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/04/26 19:27:05.136 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/04/26 19:27:05.176 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] table test.finish_mark_for_ddl exists + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.3401.out cli changefeed pause -c simple-basic [Pipeline] { [Pipeline] withCredentials [2024/04/26 19:27:05.310 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN PASS [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Fri Apr 26 19:27:05 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + 
GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.52925294.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=bd663b81-35d7-4836-8274-c9539df8f9b9 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6xvpn GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g pingcap_tiflow_pull_cdc_integration_kafka_test_1735-6xvpn GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-6xvpn-7nz6g GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:27:05 CST 2024] <<<<<< skip test case clustered_index for kafka! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<================= check diff failed 4-th time, retry later [2024/04/26 19:27:05.474 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/04/26 19:27:05.655 +08:00] [INFO] [main.go:812] ["testMultiDDLs take %v47.443428633s"] coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
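For reference, the group invocation recorded above can be reproduced locally in essentially the same way (a sketch; it assumes a tiflow checkout with the test binaries already built into the paths run_group.sh expects):

# run the same kafka sink test group (G17) as this CI shard
rm -rf /tmp/tidb_cdc_test && mkdir -p /tmp/tidb_cdc_test
cd tiflow
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka G17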
+ set +x [Fri Apr 26 19:27:05 CST 2024] <<<<<< START kafka consumer in ddl_puller_lag case >>>>>> [Fri Apr 26 19:27:05 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! >>>>>> + set +x + tso='449349110813687809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349110813687809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:27:05 CST 2024] <<<<<< START cdc server in tiflash case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.28832885.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:27:05.735 +08:00] [INFO] [main.go:74] ["DefaultValue integration tests take 47.52298063s"] table mark.finish_mark_1 not exists for 1-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2882.out cli tso query --pd=http://127.0.0.1:2379 + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.3436.out cli changefeed resume -c simple-basic + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:06 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fabf4558-1919-409d-a4c5-5eefbc34f501 {"id":"fabf4558-1919-409d-a4c5-5eefbc34f501","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130824} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286dbacc fabf4558-1919-409d-a4c5-5eefbc34f501 /tidb/cdc/default/default/upstream/7362135774091702037 {"id":7362135774091702037,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fabf4558-1919-409d-a4c5-5eefbc34f501 {"id":"fabf4558-1919-409d-a4c5-5eefbc34f501","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130824} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286dbacc fabf4558-1919-409d-a4c5-5eefbc34f501 /tidb/cdc/default/default/upstream/7362135774091702037 {"id":7362135774091702037,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fabf4558-1919-409d-a4c5-5eefbc34f501 {"id":"fabf4558-1919-409d-a4c5-5eefbc34f501","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130824} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286dbacc fabf4558-1919-409d-a4c5-5eefbc34f501 /tidb/cdc/default/default/upstream/7362135774091702037 {"id":7362135774091702037,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 8bd2de95-3cfd-472d-b38f-53d34f429eb3 Info: {"upstream_id":7362135774091702037,"namespace":"default","id":"8bd2de95-3cfd-472d-b38f-53d34f429eb3","sink_uri":"kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-8035?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:07.102527115+08:00","start_ts":449349110649585666,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349110649585666,"checkpoint_ts":449349110649585666,"checkpoint_time":"2024-04-26 19:27:03.706"} [Fri Apr 26 19:27:07 CST 2024] <<<<<< START kafka consumer in new_ci_collation case >>>>>> check diff failed 5-th time, retry later + set +x + tso='449349111337713665 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349111337713665 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:27:07 CST 2024] <<<<<< START cdc server in resolve_lock case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.29142916.out server --log-file /tmp/tidb_cdc_test/resolve_lock/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resolve_lock/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... 
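The `cli tso query` plus `awk '{print $1}'` steps in the trace above only capture a start timestamp from PD and feed it to `changefeed create`. A condensed sketch using the plain cdc binary rather than the coverage-instrumented cdc.test wrapper (the start_ts variable name and the head -n1 trimming are illustrative; the sink URI is the one from the resolve_lock case):

start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk -F ' ' '{print $1}')
cdc cli changefeed create --start-ts="$start_ts" \
    '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-11311?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'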
table mark.finish_mark_1 not exists for 2-th check, retry later table new_ci_collation_test.t1 not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:08 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bc826edc-20db-4ea6-87eb-61e97b60d02d {"id":"bc826edc-20db-4ea6-87eb-61e97b60d02d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ff7cd bc826edc-20db-4ea6-87eb-61e97b60d02d /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bc826edc-20db-4ea6-87eb-61e97b60d02d {"id":"bc826edc-20db-4ea6-87eb-61e97b60d02d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ff7cd bc826edc-20db-4ea6-87eb-61e97b60d02d /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bc826edc-20db-4ea6-87eb-61e97b60d02d {"id":"bc826edc-20db-4ea6-87eb-61e97b60d02d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ff7cd bc826edc-20db-4ea6-87eb-61e97b60d02d /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.cli.5336.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-manager \033[0;36m<<< Run all test success >>>\033[0m + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 
port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:08 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c7622ce1-11d1-497a-9c12-7ed6d409435d {"id":"c7622ce1-11d1-497a-9c12-7ed6d409435d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286b1ad5 c7622ce1-11d1-497a-9c12-7ed6d409435d /tidb/cdc/default/default/upstream/7362135772299206722 {"id":7362135772299206722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c7622ce1-11d1-497a-9c12-7ed6d409435d {"id":"c7622ce1-11d1-497a-9c12-7ed6d409435d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286b1ad5 c7622ce1-11d1-497a-9c12-7ed6d409435d /tidb/cdc/default/default/upstream/7362135772299206722 {"id":7362135772299206722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c7622ce1-11d1-497a-9c12-7ed6d409435d {"id":"c7622ce1-11d1-497a-9c12-7ed6d409435d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130826} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286b1ad5 c7622ce1-11d1-497a-9c12-7ed6d409435d /tidb/cdc/default/default/upstream/7362135772299206722 {"id":7362135772299206722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: e61c3d93-3445-40e5-a63d-d658a762367c Info: {"upstream_id":7362135772299206722,"namespace":"default","id":"e61c3d93-3445-40e5-a63d-d658a762367c","sink_uri":"kafka://127.0.0.1:9092/ticdc-tiflash-test-16940?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:09.099666921+08:00","start_ts":449349110813687809,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349110813687809,"checkpoint_ts":449349110813687809,"checkpoint_time":"2024-04-26 19:27:04.332"} [Fri Apr 26 19:27:09 CST 2024] <<<<<< START kafka consumer in tiflash case >>>>>> + set +x check diff successfully table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later Create changefeed successfully! 
ID: ddl-manager Info: {"upstream_id":7362135777510097629,"namespace":"default","id":"ddl-manager","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:09.291433878+08:00","start_ts":449349112058871809,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349112058871809,"checkpoint_ts":449349112058871809,"checkpoint_time":"2024-04-26 19:27:09.082"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table mark.finish_mark_1 not exists for 3-th check, retry later table new_ci_collation_test.t1 exists table new_ci_collation_test.t2 not exists for 1-th check, retry later check diff failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:10 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93631700-1173-467c-afd7-144eb9afb8a9 {"id":"93631700-1173-467c-afd7-144eb9afb8a9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130828} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2878f7d5 93631700-1173-467c-afd7-144eb9afb8a9 /tidb/cdc/default/default/upstream/7362135779042186529 {"id":7362135779042186529,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93631700-1173-467c-afd7-144eb9afb8a9 {"id":"93631700-1173-467c-afd7-144eb9afb8a9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130828} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2878f7d5 93631700-1173-467c-afd7-144eb9afb8a9 /tidb/cdc/default/default/upstream/7362135779042186529 {"id":7362135779042186529,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93631700-1173-467c-afd7-144eb9afb8a9 {"id":"93631700-1173-467c-afd7-144eb9afb8a9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130828} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2878f7d5 93631700-1173-467c-afd7-144eb9afb8a9 /tidb/cdc/default/default/upstream/7362135779042186529 {"id":7362135779042186529,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2971.out cli changefeed create --start-ts=449349111337713665 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-11311?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + set +x [Fri Apr 26 19:27:10 CST 2024] <<<<<< START kafka consumer in ddl_manager case >>>>>> Create changefeed successfully! 
ID: aee72b80-a591-451e-8cf9-2f5bb43d5716 Info: {"upstream_id":7362135779042186529,"namespace":"default","id":"aee72b80-a591-451e-8cf9-2f5bb43d5716","sink_uri":"kafka://127.0.0.1:9092/ticdc-resolve-lock-test-11311?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:11.420664453+08:00","start_ts":449349111337713665,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349111337713665,"checkpoint_ts":449349111337713665,"checkpoint_time":"2024-04-26 19:27:06.331"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
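The `table ... not exists for N-th check, retry later` lines that follow come from a helper that polls the downstream until a table appears. A rough bash equivalent, assuming a MySQL-compatible downstream at 127.0.0.1:3306 (host, port, credentials, and retry budget are illustrative, not taken from the scripts):

table=cdc_tiflash_test.multi_data_type
for i in $(seq 1 60); do
    if mysql -h 127.0.0.1 -P 3306 -u root -e "DESC ${table}" >/dev/null 2>&1; then
        echo "table ${table} exists"
        break
    fi
    echo "table ${table} not exists for ${i}-th check, retry later"
    sleep 2
done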
table cdc_tiflash_test.multi_data_type not exists for 2-th check, retry later table mark.finish_mark_1 not exists for 4-th check, retry later check diff failed 2-th time, retry later table new_ci_collation_test.t2 exists table new_ci_collation_test.t3 not exists for 1-th check, retry later + set +x [Fri Apr 26 19:27:12 CST 2024] <<<<<< START kafka consumer in resolve_lock case >>>>>> go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/google/uuid v1.6.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/docker/go-units v0.5.0 go: downloading golang.org/x/sync v0.7.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/prometheus/procfs v0.13.0 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 table cdc_tiflash_test.multi_data_type exists check diff successfully 
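The `check diff failed N-th time, retry later` / `check diff successfully` pairs above wrap a retried row-level comparison between upstream and downstream. A hedged sketch of such a retry wrapper around sync_diff_inspector (the config path, retry count, and sleep interval are illustrative):

for i in $(seq 1 10); do
    if sync_diff_inspector --config=/tmp/tidb_cdc_test/tiflash/diff_config.toml >/dev/null 2>&1; then
        echo "check diff successfully"
        exit 0
    fi
    echo "check diff failed ${i}-th time, retry later"
    sleep 3
done
echo "check diff failed at last" >&2
exit 1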
\033[0;36m<<< Run all test success >>>\033[0m wait process cdc.test exit for 1-th time... table mark.finish_mark_1 not exists for 5-th check, retry later table test.finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 2-th time... table new_ci_collation_test.t3 exists table new_ci_collation_test.t4 not exists for 1-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:27:15 CST 2024] <<<<<< run test case tiflash success! >>>>>> go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/gorilla/mux v1.8.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/spf13/cobra v1.8.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/sourcegraph/appdash 
v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading k8s.io/api v0.28.6 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/robfig/cron v1.2.0 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/kr/pretty v0.3.1 check diff failed 3-th time, retry later go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/ncw/directio v1.0.5 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: 
downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 table mark.finish_mark_1 not exists for 6-th check, retry later go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd table test.finish_mark not exists for 2-th check, retry later table new_ci_collation_test.t4 exists table new_ci_collation_test.t5 not exists for 1-th check, retry later Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-ww8h2-gqntb --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "d6a9f2d107f5d3ca286ec362b4141f495a08724e" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-ww8h2" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL 
https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test check diff failed 4-th time, retry later table mark.finish_mark_1 exists table mark.finish_mark_2 not exists for 1-th check, retry later Cache restored successfully 
(ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 10.45 secs (355605418 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { table test.finish_mark not exists for 3-th check, retry later [Pipeline] sh [Pipeline] { table new_ci_collation_test.t5 exists check diff failed 1-th time, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 check diff failed 5-th time, retry later go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 table test.finish_mark not exists for 4-th check, retry later [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) [Pipeline] } [Pipeline] { table mark.finish_mark_2 not exists for 2-th check, retry later [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache check diff failed 2-th time, retry later check diff successfully Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-52ncr-2prhv --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "2fc045728bf6fc19c55d772915c5d402fafeebe1" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-52ncr" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: 
"1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v" - name: "JENKINS_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test table test.finish_mark not exists for 5-th check, retry later table mark.finish_mark_2 not exists for 3-th check, retry later ERROR: Failed to launch pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0k531 io.fabric8.kubernetes.client.KubernetesClientTimeoutException: Timed out waiting for [1000000] milliseconds for [Pod] with name:[pingcap-tiflow-pull-cdc-integration-kafka-test-1735-q9n5k-0k531] in namespace [jenkins-tiflow]. at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilCondition(BaseOperation.java:939) at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:921) at io.fabric8.kubernetes.client.dsl.internal.BaseOperation.waitUntilReady(BaseOperation.java:97) at org.csanchez.jenkins.plugins.kubernetes.KubernetesLauncher.launch(KubernetesLauncher.java:185) at hudson.slaves.SlaveComputer.lambda$_connect$0(SlaveComputer.java:297) at jenkins.util.ContextResettingExecutorService$2.call(ContextResettingExecutorService.java:46) at jenkins.security.ImpersonatingExecutorService$2.call(ImpersonatingExecutorService.java:80) at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264) at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128) at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628) at java.base/java.lang.Thread.run(Thread.java:829) check diff failed 3-th time, retry later check diff failed 1-th time, retry later table mark.finish_mark_2 not exists for 4-th check, retry later table test.finish_mark not exists for 6-th check, retry later check diff failed 4-th time, retry later Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-tmlzt-s3vmf --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "e59af30910eac539e300a437ac58f7dcbae8cabf" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-tmlzt" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" 
readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: 
"quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test check diff failed 2-th time, retry later table mark.finish_mark_2 not exists for 5-th check, retry later table test.finish_mark not exists for 7-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/region_merge/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff successfully check diff failed 1-th time, retry later check diff failed 3-th time, retry later table mark.finish_mark_2 not exists for 6-th check, retry later table test.finish_mark not exists for 8-th check, retry later wait process 5297 exit for 1-th time... wait process 5297 exit for 2-th time... wait process 5297 exit for 3-th time... wait process 5297 exit for 4-th time... wait process 5297 exit for 5-th time... wait process 5297 exit for 6-th time... check diff successfully check diff failed 4-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/region_merge Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... wait process 5297 exit for 7-th time... wait process cdc.test exit for 1-th time... table test.finish_mark not exists for 9-th check, retry later wait process 5297 exit for 8-th time... wait process cdc.test exit for 2-th time... table mark.finish_mark_2 exists table mark.finish_mark_3 not exists for 1-th check, retry later wait process 5297 exit for 9-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:27:30 CST 2024] <<<<<< run test case new_ci_collation success! >>>>>> wait process 5297 exit for 10-th time... check diff failed 5-th time, retry later wait process 5297 exit for 11-th time... table test.finish_mark not exists for 10-th check, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5297) - No such process wait process 5297 exit for 12-th time... 
process 5297 already exit [Fri Apr 26 19:27:32 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.54445446.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table mark.finish_mark_3 not exists for 2-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 8.68 secs (428173171 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] checkout [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { The recommended git tool is: git [Pipeline] sh [Pipeline] // timeout [Pipeline] } [Pipeline] // dir [Pipeline] // dir [Pipeline] } [Pipeline] } + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3a442095; decorates RemoteLauncher[hudson.remoting.Channel@5f7541ef:JNLP4-connect connection from 10.233.68.15/10.233.68.15:57958] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 check diff successfully Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table mark.finish_mark_3 not exists for 3-th check, retry later [Pipeline] // container [Pipeline] sh table test.finish_mark not exists for 11-th check, retry later [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] { [Pipeline] { + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G07 Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=2e3e5e7f-81a2-436a-9df4-e03de57bc43b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin 
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-cdv81 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn pingcap_tiflow_pull_cdc_integration_kafka_test_1735-cdv81 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] checkout [Pipeline] checkout check diff failed 1-th time, retry later The recommended git tool is: git The recommended git tool is: git [Pipeline] // stage [Pipeline] // stage [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] sh [Pipeline] // container [Pipeline] // container [Pipeline] } [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
[Pipeline] // withEnv [Pipeline] // withEnv + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G14 [Pipeline] } [Pipeline] } [Pipeline] // node Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4d46ec47; decorates RemoteLauncher[hudson.remoting.Channel@24007d48:JNLP4-connect connection from 10.233.123.249/10.233.123.249:55802] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 table mark.finish_mark_3 not exists for 4-th check, retry later No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@76f86f59; decorates RemoteLauncher[hudson.remoting.Channel@1c9ee89d:JNLP4-connect connection from 10.233.127.200/10.233.127.200:45946] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] // node [Pipeline] } [Pipeline] } Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=ad01f181-d57d-4597-9dea-d567f3b83c64 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes 
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-1hp4d GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-1hp4d pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
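Each test pod above ends up running the same entry point with a different group id: it wipes /tmp/tidb_cdc_test and invokes ./tests/integration_tests/run_group.sh with the sink type (kafka) and a group such as G07 or G14. A rough local equivalent is sketched below, assuming a tiflow checkout whose ./bin already contains the cdc test binaries (in CI they come from the restored tiflow-cdc cache).
  #!/usr/bin/env bash
  # Sketch of what one CI pod runs for a single test group; the commands and
  # paths follow the log above, prebuilt binaries in ./bin are assumed.
  set -euo pipefail
  cd tiflow                      # a checkout of pingcap/tiflow
  rm -rf /tmp/tidb_cdc_test
  mkdir -p /tmp/tidb_cdc_test
  chmod +x ./tests/integration_tests/run_group.sh
  ./tests/integration_tests/run_group.sh kafka G07   # sink type, test group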
[Pipeline] // podTemplate Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] // podTemplate table test.finish_mark not exists for 12-th check, retry later Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] } [Pipeline] } [Pipeline] // withEnv + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] } [Pipeline] } < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:37 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 +0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 {"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112845303821,"min-table-barrier-ts":449349112845303821,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 +0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 
{"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112845303821,"min-table-barrier-ts":449349112845303821,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 
+0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 {"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112845303821,"min-table-barrier-ts":449349112845303821,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 
{"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Fri Apr 26 19:27:37 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.54955497.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > check diff failed 2-th time, retry later Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Commit message: "feat(tidb): test flashbacktest package (#2942)" [Pipeline] withEnv [Pipeline] { start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 [Pipeline] container [Pipeline] { table test.finish_mark not exists for 13-th check, retry later [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 Verifying upstream PD is started... 
< HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:38 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 +0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 {"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d5
9f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112989483009,"min-table-barrier-ts":449349112989483009,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 +0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 {"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112989483009,"min-table-barrier-ts":449349112989483009,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7362135777510097629 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:27:09.291433878 +0800 CST StartTs:449349112058871809 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0022c13b0 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349112111300610} {CheckpointTs:449349112845303821 MinTableBarrierTs:449349112845303821 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6ed52867-9f60-4824-92da-8e2f3241c705 
{"id":"6ed52867-9f60-4824-92da-8e2f3241c705","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130852} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a286ffad2 6ed52867-9f60-4824-92da-8e2f3241c705 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7362135777510097629,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-7738?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:27:09.291433878+08:00","start-ts":449349112058871809,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349112111300610} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449349112989483009,"min-table-barrier-ts":449349112989483009,"admin-job-type":0} /tidb/cdc/default/default/task/position/6ed52867-9f60-4824-92da-8e2f3241c705/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362135777510097629 {"id":7362135777510097629,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table ddl_manager.finish_mark not exists for 1-th check, retry later [Pipeline] withCredentials > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN 
or $TICDC_CODECOV_TOKEN > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { table mark.finish_mark_3 not exists for 5-th check, retry later [Pipeline] cache Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" Commit message: "feat(tidb): test flashbacktest package (#2942)" start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... check diff failed 3-th time, retry later > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 table test.finish_mark exists check diff successfully table ddl_manager.finish_mark not exists for 2-th check, retry later table mark.finish_mark_3 not exists for 6-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 4-th time, retry later Verifying downstream PD is started... table ddl_manager.finish_mark not exists for 3-th check, retry later Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw table mark.finish_mark_3 not exists for 7-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/batch_add_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 5-th time, retry later table ddl_manager.finish_mark not exists for 4-th check, retry later table mark.finish_mark_3 not exists for 8-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a4f5bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:4184, start at 2024-04-26 19:27:41.449784458 +0800 CST m=+5.458533833 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:41.457 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:41.423 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:41.423 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a4f5bc0014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:4184, start at 2024-04-26 19:27:41.449784458 +0800 CST m=+5.458533833 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:41.457 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:41.423 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:41.423 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a4f5940014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:4266, start at 2024-04-26 19:27:41.435861349 +0800 CST m=+5.394300553 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:41.442 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:41.413 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:41.413 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
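The bootstrapped/tikv_gc_* dump above is the contents of the upstream cluster's mysql.tidb table, printed once the harness can finally connect to TiDB (the interleaved "ERROR 2003" lines are the failed attempts while it waits). A minimal sketch of reproducing that check from a plain shell, assuming a default TiDB port of 4000 and a passwordless root user — both placeholders, not values taken from the harness:

    # Sketch only: poll TiDB until it answers, then dump the same
    # bootstrap/GC variables that appear in the log above.
    host=127.0.0.1
    port=4000            # assumed default TiDB port, not read from the log
    for i in $(seq 1 60); do
      if mysql -h "$host" -P "$port" -u root -e 'SELECT 1' >/dev/null 2>&1; then
        mysql -h "$host" -P "$port" -u root -e \
          'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'
        exit 0
      fi
      sleep 1            # the "ERROR 2003 ... retry later" phase seen in the log
    done
    echo "TiDB did not become ready" >&2
    exit 1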
Logging trace to /tmp/tidb_cdc_test/region_merge/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/region_merge/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/batch_add_table Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully table mark.finish_mark_3 not exists for 9-th check, retry later [Fri Apr 26 19:27:46 CST 2024] <<<<<< START cdc server in region_merge case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.region_merge.57225724.out server --log-file /tmp/tidb_cdc_test/region_merge/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/region_merge/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 5-th check, retry later check diff failed 1-th time, retry later table test.finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 6-th check, retry later table mark.finish_mark_3 not exists for 10-th check, retry later Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h8q8j-r2n5w --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "db2b538419b13652dbe55078cfcfe3e071785da5" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h8q8j" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" 
memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:49 GMT < Content-Length: 859 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a0bafb83-72e0-4201-8d9d-2f9ac5040dbd {"id":"a0bafb83-72e0-4201-8d9d-2f9ac5040dbd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130866} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2918173a a0bafb83-72e0-4201-8d9d-2f9ac5040dbd /tidb/cdc/default/default/upstream/7362135950460400538 {"id":7362135950460400538,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a0bafb83-72e0-4201-8d9d-2f9ac5040dbd {"id":"a0bafb83-72e0-4201-8d9d-2f9ac5040dbd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130866} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2918173a a0bafb83-72e0-4201-8d9d-2f9ac5040dbd /tidb/cdc/default/default/upstream/7362135950460400538 {"id":7362135950460400538,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a0bafb83-72e0-4201-8d9d-2f9ac5040dbd {"id":"a0bafb83-72e0-4201-8d9d-2f9ac5040dbd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130866} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2918173a a0bafb83-72e0-4201-8d9d-2f9ac5040dbd /tidb/cdc/default/default/upstream/7362135950460400538 {"id":7362135950460400538,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 58e0e5d5-a5ad-494a-9269-b0181e30c9d0 Info: {"upstream_id":7362135950460400538,"namespace":"default","id":"58e0e5d5-a5ad-494a-9269-b0181e30c9d0","sink_uri":"kafka://127.0.0.1:9092/ticdc-region-merge-test-12264?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:27:49.638937273+08:00","start_ts":449349122647130116,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349122647130116,"checkpoint_ts":449349122647130116,"checkpoint_time":"2024-04-26 19:27:49.473"} [Fri Apr 26 19:27:49 CST 2024] <<<<<< START kafka consumer in region_merge case >>>>>> check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark not exists for 3-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 7-th check, retry later table mark.finish_mark_3 not exists for 11-th check, retry later split_and_random_merge scale: 20 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 9.89 secs (375730048 bytes/sec) [Pipeline] { check diff failed 3-th time, retry later Starting Upstream TiDB... table test.finish_mark not exists for 4-th check, retry later Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a56d9c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:1292, start at 2024-04-26 19:27:49.13097603 +0800 CST m=+5.229125856 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:49.137 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:49.095 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:49.095 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a56d9c0013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:1292, start at 2024-04-26 19:27:49.13097603 +0800 CST m=+5.229125856 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:49.137 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:49.095 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:49.095 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a56da40013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:1371, start at 2024-04-26 19:27:49.12664237 +0800 CST m=+5.156610506 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:49.136 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:49.097 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:49.097 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
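Every "<<<<<< START cdc server ... >>>>>>" block in this log is followed by the same readiness probe: curl polls http://127.0.0.1:8300/debug/info up to 50 times, three seconds apart, and breaks once the response contains "etcd info". A condensed sketch of that loop, built only from the commands visible in the set -x trace; how the harness reacts to "failed to get info:" never shows up in the trace, so treating it as fatal here is an assumption:

    # Condensed form of the readiness probe traced above (region_merge,
    # kv_client_stream_reconnect and changefeed_finish all run the same loop).
    for ((i = 0; i <= 50; i++)); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info)
      if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error" >&2; exit 1   # assumed handling
      fi
      if echo "$res" | grep -q 'etcd info'; then
        break            # capture registered in etcd; server is ready
      fi
      if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time" >&2; exit 1
      fi
      sleep 3
    done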
Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 8-th check, retry later table mark.finish_mark_3 not exists for 12-th check, retry later [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] sh [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] checkout [Pipeline] stage [Pipeline] { (Test) The recommended git tool is: git [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { check diff failed 4-th time, retry later [Pipeline] withCredentials + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { table test.finish_mark not exists for 5-th check, retry later [Pipeline] { [Pipeline] } [Pipeline] // timeout [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // container [Pipeline] sh VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a589e80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:1350, start at 2024-04-26 19:27:50.941118052 +0800 CST m=+5.296014574 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:50.950 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:50.956 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:50.956 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a589e80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:1350, start at 2024-04-26 19:27:50.941118052 +0800 CST m=+5.296014574 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:50.950 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:50.956 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:50.956 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a58ab40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:1435, start at 2024-04-26 19:27:50.980555088 +0800 CST m=+5.281230789 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:50.986 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:50.957 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:50.957 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Fri Apr 26 19:27:54 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28032805.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 9-th check, retry later [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache table mark.finish_mark_3 not exists for 13-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G02 Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=532d878c-2846-480b-b7c8-dee750e5c31e BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-ww8h2 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-ww8h2 pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:27:54 CST 2024] <<<<<< run test case consistent_replicate_ddl success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7a0c7023; decorates RemoteLauncher[hudson.remoting.Channel@35dcf4dd:JNLP4-connect connection from 10.233.67.69/10.233.67.69:38770] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 table test.finish_mark not exists for 6-th check, retry later [Fri Apr 26 19:27:55 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.28212823.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 14-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 10-th check, retry later *************************** 1. row *************************** count(distinct region_id): 1 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:57 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/beb42053-6bdb-4d8d-a71c-34b2c27961a7 {"id":"beb42053-6bdb-4d8d-a71c-34b2c27961a7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130874} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a293133ca beb42053-6bdb-4d8d-a71c-34b2c27961a7 /tidb/cdc/default/default/upstream/7362135987422406695 {"id":7362135987422406695,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/beb42053-6bdb-4d8d-a71c-34b2c27961a7 {"id":"beb42053-6bdb-4d8d-a71c-34b2c27961a7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130874} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a293133ca beb42053-6bdb-4d8d-a71c-34b2c27961a7 /tidb/cdc/default/default/upstream/7362135987422406695 {"id":7362135987422406695,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/beb42053-6bdb-4d8d-a71c-34b2c27961a7 {"id":"beb42053-6bdb-4d8d-a71c-34b2c27961a7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130874} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a293133ca beb42053-6bdb-4d8d-a71c-34b2c27961a7 /tidb/cdc/default/default/upstream/7362135987422406695 {"id":7362135987422406695,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table test.finish_mark not exists for 7-th check, retry later [Fri Apr 26 19:27:57 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>> check diff failed 6-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a5e9300019 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:4371, start at 2024-04-26 19:27:57.041483697 +0800 CST m=+5.182907908 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:57.048 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:57.004 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:57.004 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend table ddl_manager.finish_mark not exists for 11-th check, retry later table mark.finish_mark_3 not exists for 15-th check, retry later Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:27:59 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/658443a6-fca0-4ead-ba75-4c8b56fca81b {"id":"658443a6-fca0-4ead-ba75-4c8b56fca81b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130876} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2935edd1 658443a6-fca0-4ead-ba75-4c8b56fca81b /tidb/cdc/default/default/upstream/7362135986388721498 {"id":7362135986388721498,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/658443a6-fca0-4ead-ba75-4c8b56fca81b {"id":"658443a6-fca0-4ead-ba75-4c8b56fca81b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130876} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2935edd1 658443a6-fca0-4ead-ba75-4c8b56fca81b /tidb/cdc/default/default/upstream/7362135986388721498 {"id":7362135986388721498,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/658443a6-fca0-4ead-ba75-4c8b56fca81b {"id":"658443a6-fca0-4ead-ba75-4c8b56fca81b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130876} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2935edd1 658443a6-fca0-4ead-ba75-4c8b56fca81b /tidb/cdc/default/default/upstream/7362135986388721498 {"id":7362135986388721498,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:27:59 CST 2024] <<<<<< START kafka consumer in changefeed_finish case >>>>>> Avoid second fetch Checking out Revision cb9bc9e8822a01a5d59a2f670fb429e588065145 (origin/main) Commit message: "feat(tidb): test flashbacktest package (#2942)" * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17C9D0F40F5059AC < X-Xss-Protection: 1; mode=block < Date: Fri, 26 Apr 2024 11:27:59 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact check diff successfully table test.finish_mark exists check diff successfully Bucket 's3://logbucket/' created [Fri Apr 26 19:27:59 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>> Exiting on signal: INTERRUPT VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a5e9300019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:4371, start at 2024-04-26 19:27:57.041483697 +0800 CST m=+5.182907908 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:57.048 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:57.004 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:57.004 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a5ec440016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:4444, start at 2024-04-26 19:27:57.238453634 +0800 CST m=+5.322752418 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:29:57.245 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:27:57.201 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:17:57.201 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 1-th time... > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f cb9bc9e8822a01a5d59a2f670fb429e588065145 # timeout=10 table ddl_manager.finish_mark not exists for 12-th check, retry later table mark.finish_mark_3 not exists for 16-th check, retry later check diff failed 1-th time, retry later wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... split_and_random_merge scale: 40 check diff failed 1-th time, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:28:01 CST 2024] <<<<<< run test case kafka_simple_basic success! >>>>>> *************************** 1. row *************************** count(distinct region_id): 40 table ddl_manager.finish_mark not exists for 13-th check, retry later table mark.finish_mark_3 not exists for 17-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:03 CST 2024] <<<<<< run test case consistent_replicate_nfs success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 2-th time, retry later [Fri Apr 26 19:28:03 CST 2024] <<<<<< START cdc server in batch_add_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.57985800.out server --log-file /tmp/tidb_cdc_test/batch_add_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/batch_add_table/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 2-th time, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 7.52 secs (494118275 bytes/sec) [Pipeline] { [Pipeline] cache table mark.finish_mark_3 not exists for 18-th check, retry later check diff successfully check diff failed 3-th time, retry later table mark.finish_mark_3 not exists for 19-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:06 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 14-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:28:06 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/053af017-0249-43c0-8989-6070af74d79b {"id":"053af017-0249-43c0-8989-6070af74d79b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130883} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a294e5618 053af017-0249-43c0-8989-6070af74d79b /tidb/cdc/default/default/upstream/7362136019995151800 {"id":7362136019995151800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/053af017-0249-43c0-8989-6070af74d79b {"id":"053af017-0249-43c0-8989-6070af74d79b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130883} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a294e5618 053af017-0249-43c0-8989-6070af74d79b /tidb/cdc/default/default/upstream/7362136019995151800 {"id":7362136019995151800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/053af017-0249-43c0-8989-6070af74d79b {"id":"053af017-0249-43c0-8989-6070af74d79b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130883} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a294e5618 053af017-0249-43c0-8989-6070af74d79b /tidb/cdc/default/default/upstream/7362136019995151800 {"id":7362136019995151800,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.cli.5859.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-batch-add-table-test-11643?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 101a37f9-5f6a-4f4c-8fa7-a9ebac53bb57 Info: {"upstream_id":7362136019995151800,"namespace":"default","id":"101a37f9-5f6a-4f4c-8fa7-a9ebac53bb57","sink_uri":"kafka://127.0.0.1:9092/ticdc-batch-add-table-test-11643?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:28:06.765344132+08:00","start_ts":449349127138181126,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349127138181126,"checkpoint_ts":449349127138181126,"checkpoint_time":"2024-04-26 19:28:06.605"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... split_and_random_merge scale: 80 table ddl_manager.finish_mark not exists for 15-th check, retry later check diff failed 4-th time, retry later + set +x [Fri Apr 26 19:28:08 CST 2024] <<<<<< START kafka consumer in batch_add_table case >>>>>> table batch_add_table.finish_mark not exists for 1-th check, retry later table mark.finish_mark_3 not exists for 20-th check, retry later table ddl_manager.finish_mark not exists for 16-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:09 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 21-th check, retry later table batch_add_table.finish_mark not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 17-th check, retry later check diff successfully table mark.finish_mark_3 not exists for 22-th check, retry later table batch_add_table.finish_mark exists check diff successfully cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:28:12 CST 2024] <<<<<< run test case batch_add_table success! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend *************************** 1. row *************************** count(distinct region_id): 3 check diff failed 1-th time, retry later Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide table ddl_manager.finish_mark not exists for 18-th check, retry later table mark.finish_mark_3 not exists for 23-th check, retry later * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17C9D0F794EE7F56 < X-Xss-Protection: 1; mode=block < Date: Fri, 26 Apr 2024 11:28:14 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Fri Apr 26 19:28:15 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Exiting on signal: INTERRUPT check diff failed 2-th time, retry later table ddl_manager.finish_mark not exists for 19-th check, retry later table mark.finish_mark_3 not exists for 24-th check, retry later table region_merge.t1 exists table ddl_manager.finish_mark not exists for 20-th check, retry later check diff failed 1-th time, retry later check diff failed 3-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:18 CST 2024] <<<<<< run test case consistent_partition_table success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_3 not exists for 25-th check, retry later table ddl_manager.finish_mark not exists for 21-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 13.92 secs (266922656 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { check diff failed 4-th time, retry later [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { table mark.finish_mark_3 not exists for 26-th check, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... [Pipeline] sh find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic_avro Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:28:21 CST 2024] <<<<<< run test case region_merge success! >>>>>> [Pipeline] sh check diff successfully + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 table ddl_manager.finish_mark not exists for 22-th check, retry later table mark.finish_mark_3 not exists for 27-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732
Git Commit Branch: master
UTC Build Time: 2024-04-25 21:05:46
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
Starting Downstream TiKV...
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732
Git Commit Branch: master
UTC Build Time: 2024-04-25 21:05:46
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
[Pipeline] }
check diff failed 1-th time, retry later
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] stage
[Pipeline] { (Test)
table ddl_manager.finish_mark not exists for 23-th check, retry later
[Pipeline] // container
[Pipeline] sh
table mark.finish_mark_3 not exists for 28-th check, retry later
[Pipeline] // container
[Pipeline] sh
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G00
start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2
Starting Upstream PD...
Release Version: v8.2.0-alpha-10-g50c80407c
Edition: Community
Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf
Git Branch: master
UTC Build Time: 2024-04-26 02:17:39
Starting Downstream PD...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_rocks/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
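The ZooKeeper/Kafka readiness probe traced above amounts to two port checks plus a check that broker 1 has registered its ephemeral node in ZooKeeper. A minimal bash sketch of that probe, assuming a local single-broker setup; the retry loop and sleep interval are illustrative and not taken from the harness:

wait_for_kafka() {
  local retries=30
  for ((i = 1; i <= retries; i++)); do
    # ZooKeeper (2181) and the Kafka listener (9092) reachable, and broker 1
    # registered under /brokers/ids/1 in ZooKeeper's "dump" output?
    if nc -z localhost 2181 && nc -z localhost 9092 &&
       echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; then
      echo "kafka broker 1 is ready"
      return 0
    fi
    sleep 2
  done
  echo "kafka did not become ready" >&2
  return 1
}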
[Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=cc17afdc-7ad0-44c1-910e-917ed1e41fd8 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ 
BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-52ncr GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v pingcap_tiflow_pull_cdc_integration_kafka_test_1735-52ncr GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:25 CST 2024] <<<<<< run test case bdr_mode success! >>>>>> Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
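The G00 agent's shell trace above shows how a test group is launched: wipe /tmp/tidb_cdc_test, then hand the sink type and group name to run_group.sh, which runs the cases listed after "Run cases:" with TEST_GROUP set accordingly. A sketch of replaying that invocation locally; the checkout location and having the prebuilt cdc/tidb binaries on PATH are assumptions:

cd tiflow                                           # assumed local checkout of pingcap/tiflow
rm -rf /tmp/tidb_cdc_test                           # the harness keeps all per-case state here
mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka G00    # sink type "kafka", case group "G00"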
+ rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=2fec267e-8f27-4eb6-a450-2ee9f8fc8260 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-tmlzt GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-tmlzt pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... <<================= [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 24-th check, retry later check diff failed 2-th time, retry later table mark.finish_mark_3 not exists for 29-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/multi_rocks Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 25-th check, retry later check diff failed 3-th time, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:28 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! >>>>>> The 1 times to try to start tidb cluster... table mark.finish_mark_3 not exists for 30-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 26-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 4-th time, retry later table mark.finish_mark_3 not exists for 31-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a7fb140004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:4807, start at 2024-04-26 19:28:30.919269788 +0800 CST m=+5.156422872 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:30.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:30.917 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:30.917 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a7fb140004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:4807, start at 2024-04-26 19:28:30.919269788 +0800 CST m=+5.156422872 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:30.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:30.917 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:30.917 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a7fc800015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:4888, start at 2024-04-26 19:28:31.050113847 +0800 CST m=+5.226038178 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:31.059 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:31.058 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:31.058 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
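The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above (bootstrapped, tidb_server_version, the tikv_gc_* rows) match the layout of TiDB's mysql.tidb system table, which the harness appears to dump while verifying that each TiDB instance is up. A hedged way to reproduce that dump by hand; the host, port, and the exact statement the scripts use are assumptions:

mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'

The tikv_gc_safe_point and tikv_gc_life_time rows are the ones that matter for CDC: versions older than the safe point have been garbage-collected and can no longer be read by a changefeed.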
Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 27-th check, retry later table mark.finish_mark_3 not exists for 32-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<================= kafka downstream isn't support syncpoint record [Fri Apr 26 19:28:31 CST 2024] <<<<<< run test case syncpoint success! >>>>>> check diff failed 5-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:28:34 CST 2024] <<<<<< START cdc server in kafka_simple_basic_avro case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.62296231.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/common_1/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Verifying downstream PD is started... table ddl_manager.finish_mark not exists for 28-th check, retry later table mark.finish_mark_3 not exists for 33-th check, retry later check diff successfully Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:35 CST 2024] <<<<<< run test case hang_sink_suicide success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a830580013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:2162, start at 2024-04-26 19:28:34.35429614 +0800 CST m=+5.188293003 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:34.361 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:34.326 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:34.326 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 1-th time, retry later table ddl_manager.finish_mark not exists for 29-th check, retry later check diff failed 1-th time, retry later check diff successfully table mark.finish_mark_3 not exists for 34-th check, retry later wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a830580013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:2162, start at 2024-04-26 19:28:34.35429614 +0800 CST m=+5.188293003 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:34.361 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:34.326 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:34.326 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a8310c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:2236, start at 2024-04-26 19:28:34.398621259 +0800 CST m=+5.180811935 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:34.405 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:34.371 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:34.371 +0800 All versions after safe point can be accessed. (DO NOT EDIT) start tidb cluster in /tmp/tidb_cdc_test/common_1 Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:28:37 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/28589e8a-6d05-47f9-99b8-6b256de79383 {"id":"28589e8a-6d05-47f9-99b8-6b256de79383","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29da9dc8 28589e8a-6d05-47f9-99b8-6b256de79383 /tidb/cdc/default/default/upstream/7362136167834096626 {"id":7362136167834096626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/28589e8a-6d05-47f9-99b8-6b256de79383 {"id":"28589e8a-6d05-47f9-99b8-6b256de79383","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29da9dc8 28589e8a-6d05-47f9-99b8-6b256de79383 /tidb/cdc/default/default/upstream/7362136167834096626 {"id":7362136167834096626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/28589e8a-6d05-47f9-99b8-6b256de79383 {"id":"28589e8a-6d05-47f9-99b8-6b256de79383","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130914} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29da9dc8 28589e8a-6d05-47f9-99b8-6b256de79383 /tidb/cdc/default/default/upstream/7362136167834096626 {"id":7362136167834096626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.cli.6290.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-avro-13378?protocol=simple&encoding-format=avro' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/conf/changefeed.toml ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! 
ID: 182c7d7a-4a0f-43f3-93ef-139ed8e555a7 Info: {"upstream_id":7362136167834096626,"namespace":"default","id":"182c7d7a-4a0f-43f3-93ef-139ed8e555a7","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-avro-13378?protocol=simple\u0026encoding-format=avro","create_time":"2024-04-26T19:28:37.737047468+08:00","start_ts":449349135254683651,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349135254683651,"checkpoint_ts":449349135254683651,"checkpoint_time":"2024-04-26 19:28:37.567"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:28:37 CST 2024] <<<<<< run test case kv_client_stream_reconnect success! >>>>>> table ddl_manager.finish_mark not exists for 30-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:28:38 CST 2024] <<<<<< run test case server_config_compatibility success! 
>>>>>> check diff failed 2-th time, retry later table mark.finish_mark_3 not exists for 35-th check, retry later + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:28:39 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.36053607.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 31-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 3-th time, retry later table mark.finish_mark_3 not exists for 36-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 4-th time, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table mark.finish_mark_3 not exists for 37-th check, retry later table ddl_manager.finish_mark not exists for 32-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
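Note: the "+ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info" trace above is the readiness loop used after launching the cdc server: the endpoint is polled up to 50 times, three seconds apart, until its output contains "etcd info". Condensed into a standalone sketch below; the endpoint and grep patterns are taken from the trace, the rest is illustrative.

    #!/usr/bin/env bash
    # Condensed sketch of the readiness loop shown in the trace above: poll the
    # capture's /debug/info endpoint until it reports etcd info, at most 50 times.
    endpoint=http://127.0.0.1:8300/debug/info
    for ((i = 0; i <= 50; i++)); do
      res=$(curl -vsL --max-time 20 "$endpoint" 2>/dev/null || true)
      if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        exit 0
      fi
      echo "$res" | grep -q 'failed to get info:' && echo "capture not ready yet"
      sleep 3
    done
    echo "cdc server failed to start within the retry budget" >&2
    exit 1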
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a8ab04000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:1477, start at 2024-04-26 19:28:42.190165184 +0800 CST m=+5.183106357 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:42.196 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:42.177 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:42.177 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:28:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1bb00a56-97da-4440-a413-0dcdd52db771 {"id":"1bb00a56-97da-4440-a413-0dcdd52db771","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130919} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29e7eecd 1bb00a56-97da-4440-a413-0dcdd52db771 /tidb/cdc/default/default/upstream/7362136183244681223 {"id":7362136183244681223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1bb00a56-97da-4440-a413-0dcdd52db771 {"id":"1bb00a56-97da-4440-a413-0dcdd52db771","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130919} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29e7eecd 1bb00a56-97da-4440-a413-0dcdd52db771 /tidb/cdc/default/default/upstream/7362136183244681223 {"id":7362136183244681223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1bb00a56-97da-4440-a413-0dcdd52db771 {"id":"1bb00a56-97da-4440-a413-0dcdd52db771","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130919} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29e7eecd 1bb00a56-97da-4440-a413-0dcdd52db771 /tidb/cdc/default/default/upstream/7362136183244681223 {"id":7362136183244681223,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 9aef242b-805d-4924-aa8e-95350c712ece Info: {"upstream_id":7362136183244681223,"namespace":"default","id":"9aef242b-805d-4924-aa8e-95350c712ece","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-04-26T19:28:42.879190226+08:00","start_ts":449349135768223746,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349135768223746,"checkpoint_ts":449349135768223746,"checkpoint_time":"2024-04-26 19:28:39.526"} [Fri Apr 26 19:28:42 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:28:44 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic_avro case >>>>>> table mark.finish_mark_3 not exists for 38-th check, retry later table ddl_manager.finish_mark not exists for 33-th check, retry later check diff failed 5-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a8ab04000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:1477, start at 2024-04-26 19:28:42.190165184 +0800 CST m=+5.183106357 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:42.196 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:42.177 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:42.177 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a8ac840014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:1564, start at 2024-04-26 19:28:42.301921249 +0800 CST m=+5.238129701 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:42.309 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:42.273 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:42.273 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
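Note: the "Create changefeed successfully!" blocks above are produced by the cdc CLI. Each case creates a changefeed whose Kafka sink URI encodes the protocol and message-size options, optionally pinning --start-ts to a TSO queried beforehand (the "cli tso query --pd=..." followed by "awk -F ' ' '{print $1}'" seen in the traces). A hedged sketch of that flow follows; it uses the plain cdc binary instead of the instrumented cdc.test, and the topic name is a placeholder rather than the test's real one.

    #!/usr/bin/env bash
    # Sketch of the changefeed creation step seen above. PD address, sink URI
    # options and the CLI subcommands come from the log; the topic name and the
    # use of the plain "cdc" binary are assumptions for illustration.
    pd=http://127.0.0.1:2379
    start_ts=$(cdc cli tso query --pd="$pd" | awk -F ' ' '{print $1}')
    cdc cli changefeed create \
      --pd="$pd" \
      --start-ts="$start_ts" \
      --sink-uri='kafka://127.0.0.1:9092/example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'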
Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 34-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc) 3716673536 bytes in 18.14 secs (204883088 bytes/sec) [Pipeline] { check diff successfully table mark.finish_mark_3 not exists for 39-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2870.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 35-th check, retry later table kafka_big_messages.test not exists for 1-th check, retry later check diff failed 1-th time, retry later table mark.finish_mark_3 not exists for 40-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha
Edition: Community
Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732
Git Commit Branch: master
UTC Build Time: 2024-04-25 21:05:46
Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile: dist_release
[Pipeline] container
[Pipeline] {
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c68a908240013 Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:7116, start at 2024-04-26 19:28:48.155482297 +0800 CST m=+5.280290905 Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease 20240426-19:30:48.161 +0800 Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable true Current GC enable status
tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format.
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time 20240426-19:28:48.137 +0800 The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point 20240426-19:18:48.137 +0800 All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] sh
+ set +x
+ tso='449349137904959490 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449349137904959490 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Fri Apr 26 19:28:49 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.29042906.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
* Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
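Note: the "Waiting for zookeeper/kafka..." lines above, together with the broker check that continues just below, form the Kafka readiness probe: nc confirms the ZooKeeper and Kafka ports are open, then ZooKeeper's "dump" command is used to confirm that broker id 1 has registered. As a standalone sketch; the ports, paths and broker id come from the trace, while the retry wrapper is an assumption.

    #!/usr/bin/env bash
    # Sketch of the Kafka readiness probe traced above and just below: check the
    # ZooKeeper and Kafka ports, then confirm broker id 1 is registered in
    # ZooKeeper. The until-loops are an assumed wrapper, not the exact script.
    until nc -z localhost 2181; do echo "Waiting for zookeeper to be ready..."; sleep 1; done
    until nc -z localhost 9092; do echo "Waiting for kafka to be ready..."; sleep 1; done
    until echo dump | nc localhost 2181 | grep -F -w -q /brokers/ids/1; do
      echo "Waiting for kafka-broker to be ready..."
      sleep 1
    done
    echo "kafka broker 1 is registered"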
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // container [Pipeline] sh ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 36-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc/run.sh using Sink-Type: kafka... <<================= check diff failed 2-th time, retry later table mark.finish_mark_3 not exists for 41-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G06 Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=586cc306-42d6-4207-90bf-aa4c613fe26b JENKINS_NODE_COOKIE=2a1a3c04-226a-4341-91b6-1b81222d4a12 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1783813226958426112","prowjobid":"586cc306-42d6-4207-90bf-aa4c613fe26b","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","base_link":"https://github.com/pingcap/tiflow/commit/d0329d7f1ca9a1d0de81a565051a09fe7e9231bd","pulls":[{"number":10904,"author":"CharlesCheung96","sha":"16f5d59f936001f6d7031387873b3c668f3c5ae6","title":"redo(ticdc): enable pprof and set memory limit for redo applier","link":"https://github.com/pingcap/tiflow/pull/10904","commit_link":"https://github.com/pingcap/tiflow/pull/10904/commits/16f5d59f936001f6d7031387873b3c668f3c5ae6","author_link":"https://github.com/CharlesCheung96"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=cb9bc9e8822a01a5d59a2f670fb429e588065145 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin 
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1735 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1783813226958426112 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=618760b8f23481032f6d0010db684d46f840e8e1 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1735/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h8q8j GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw pingcap_tiflow_pull_cdc_integration_kafka_test_1735-h8q8j GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1735 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table kafka_big_messages.test exists check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a908240013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:7116, start at 2024-04-26 19:28:48.155482297 +0800 CST m=+5.280290905 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:48.161 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:48.137 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:48.137 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a90a5c0005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h32rl-nl1kg, pid:7199, start at 2024-04-26 19:28:48.282860033 +0800 CST m=+5.357701202 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:48.289 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:48.279 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:48.279 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
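Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows above (bootstrapped, tidb_server_version, the tikv_gc_* settings and so on) are the contents of each cluster's mysql.tidb system table, printed while the harness verifies that the upstream and downstream TiDB instances are up. A sketch of the equivalent manual query; the host and port are assumptions.

    #!/usr/bin/env bash
    # Sketch: reproduce the VARIABLE_NAME/VARIABLE_VALUE/COMMENT dump above by
    # reading the mysql.tidb system table directly. Host and port are
    # illustrative assumptions, not values taken from this job.
    mysql --host=127.0.0.1 --port=4000 --user=root \
      -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'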
Logging trace to /tmp/tidb_cdc_test/common_1/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/common_1/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/common_1/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 37-th check, retry later check diff failed 3-th time, retry later table mark.finish_mark_3 not exists for 42-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:28:52 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3447bf8a-9245-4aeb-90a9-536ec233c034 {"id":"3447bf8a-9245-4aeb-90a9-536ec233c034","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a027ed6 3447bf8a-9245-4aeb-90a9-536ec233c034 /tidb/cdc/default/default/upstream/7362136210511301979 {"id":7362136210511301979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3447bf8a-9245-4aeb-90a9-536ec233c034 {"id":"3447bf8a-9245-4aeb-90a9-536ec233c034","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a027ed6 3447bf8a-9245-4aeb-90a9-536ec233c034 /tidb/cdc/default/default/upstream/7362136210511301979 
{"id":7362136210511301979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3447bf8a-9245-4aeb-90a9-536ec233c034 {"id":"3447bf8a-9245-4aeb-90a9-536ec233c034","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130929} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a027ed6 3447bf8a-9245-4aeb-90a9-536ec233c034 /tidb/cdc/default/default/upstream/7362136210511301979 {"id":7362136210511301979,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2963.out cli changefeed create --start-ts=449349137904959490 '--sink-uri=kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-30076?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' The 1 times to try to start tidb cluster... Create changefeed successfully! ID: 076fb9de-5368-4d0a-b535-10b2cd36e757 Info: {"upstream_id":7362136210511301979,"namespace":"default","id":"076fb9de-5368-4d0a-b535-10b2cd36e757","sink_uri":"kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-30076?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:28:52.714299377+08:00","start_ts":449349137904959490,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349137904959490,"checkpoint_ts":449349137904959490,"checkpoint_time":"2024-04-26 19:28:47.677"} PASS coverage: 2.4% of statements in 
github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/sink_retry Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... + set +x [Fri Apr 26 19:28:54 CST 2024] <<<<<< START kafka consumer in many_pk_or_uk case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading go.uber.org/zap v1.27.0 go: downloading go.uber.org/atomic v1.11.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/coreos/go-semver v0.3.1 table mark.finish_mark_3 not exists for 43-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:28:54 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! 
>>>>>> go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 check diff failed 4-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.cli.8602.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 38-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:28:53.914 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/04/26 19:28:53.914 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7362135779042186529] [2024/04/26 19:28:53.915 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/04/26 19:28:53.915 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/04/26 19:28:53.915 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/04/26 19:28:53.915 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/04/26 19:28:53.916 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/04/26 19:28:53.916 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7362135779042186529] [2024/04/26 19:28:53.917 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/04/26 19:28:53.917 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/04/26 19:28:53.917 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/04/26 19:28:53.917 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/04/26 19:28:53.918 +08:00] [INFO] [tikv_driver.go:197] ["using API V1."] [2024/04/26 19:28:53.918 +08:00] [INFO] [main.go:180] ["genLock started"] [2024/04/26 19:28:53.920 +08:00] [INFO] [store_cache.go:477] ["change store resolve state"] [store=2] [addr=127.0.0.1:20162] [from=unresolved] [to=resolved] [liveness-state=reachable] [2024/04/26 19:28:53.929 +08:00] [INFO] [region_request.go:1688] ["send request meet region error without retry"] [req-ts=449349139531300885] [req-type=Prewrite] [region="{ region id: 24, ver: 58, confVer: 1 }"] [replica-read-type=leader] [stale-read=false] [request-sender="{rpcError:, replicaSelector: replicaSelectorV2{replicaReadType: leader, attempts: 2, cacheRegionIsValid: false, replicaStatus: [peer: 25, store: 2, isEpochStale: false, attempts: 2, replica-epoch: 0, store-epoch: 0, store-state: resolved, store-liveness-state: reachable]}}"] [retry-times=1] [total-backoff-ms=2] [total-backoff-times=1] [max-exec-timeout-ms=20000] 
[total-region-errors="25-read_index_not_ready:1, 25-epoch_not_match:1"] start tidb cluster in /tmp/tidb_cdc_test/cdc Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/stretchr/testify v1.9.0 go: downloading golang.org/x/tools v0.20.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading google.golang.org/api v0.170.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/dolthub/swiss v0.2.1 
go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 39-th check, retry later go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/jfcg/sixb v1.3.8 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/apache/thrift v0.16.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/golang/groupcache 
v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/klauspost/cpuid v1.3.1 check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 44-th check, retry later go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 + set +x + tso='449349139965411329 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349139965411329 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:28:57 CST 2024] <<<<<< START cdc server in common_1 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.86468648.out server --log-file /tmp/tidb_cdc_test/common_1/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/common_1/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully table mark.finish_mark_3 not exists for 45-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a985940012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:1823, start at 2024-04-26 19:28:56.199552698 +0800 CST m=+5.285213239 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:56.208 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:56.215 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:56.215 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a985940012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:1823, start at 2024-04-26 19:28:56.199552698 +0800 CST m=+5.285213239 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:56.208 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:56.215 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:56.215 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a987080013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:1902, start at 2024-04-26 19:28:56.291053686 +0800 CST m=+5.320784392 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:56.298 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:56.258 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:56.258 +0800 All versions after safe point can be accessed. (DO NOT EDIT) table ddl_manager.finish_mark not exists for 40-th check, retry later wait process cdc.test exit for 1-th time... Starting Upstream TiFlash... 
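The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are rows from TiDB's mysql.tidb table, which records the GC leader, run interval, life time, and safe point after bootstrap. A hedged example of inspecting those rows over the upstream TiDB port (4000 in these tests); the 720h value in the second statement is purely illustrative:

    # Inspect the GC bookkeeping rows dumped in the log (upstream TiDB listens on 4000 here).
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"

    # Illustrative only: widen the GC life time (default 10m0s above) so long-running
    # verification steps are not garbage collected underneath a test.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "UPDATE mysql.tidb SET VARIABLE_VALUE = '720h' WHERE VARIABLE_NAME = 'tikv_gc_life_time';"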
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 2-th time... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 3-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:00 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9c5bb093-20d7-49d4-a046-f13d202ee96e {"id":"9c5bb093-20d7-49d4-a046-f13d202ee96e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130937} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a15add9 9c5bb093-20d7-49d4-a046-f13d202ee96e /tidb/cdc/default/default/upstream/7362136240749523639 {"id":7362136240749523639,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9c5bb093-20d7-49d4-a046-f13d202ee96e {"id":"9c5bb093-20d7-49d4-a046-f13d202ee96e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130937} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a15add9 9c5bb093-20d7-49d4-a046-f13d202ee96e /tidb/cdc/default/default/upstream/7362136240749523639 {"id":7362136240749523639,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9c5bb093-20d7-49d4-a046-f13d202ee96e {"id":"9c5bb093-20d7-49d4-a046-f13d202ee96e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130937} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a15add9 9c5bb093-20d7-49d4-a046-f13d202ee96e /tidb/cdc/default/default/upstream/7362136240749523639 {"id":7362136240749523639,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: f29f61b4-41d3-493a-9d8f-574e53937d72 Info: {"upstream_id":7362136240749523639,"namespace":"default","id":"f29f61b4-41d3-493a-9d8f-574e53937d72","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-27852?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:00.347659487+08:00","start_ts":449349139965411329,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349139965411329,"checkpoint_ts":449349139965411329,"checkpoint_time":"2024-04-26 19:28:55.537"} [Fri Apr 26 19:29:00 CST 2024] <<<<<< START kafka consumer in common_1 case >>>>>> go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 table mark.finish_mark_3 not exists for 46-th check, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:29:00 CST 2024] <<<<<< run test case changefeed_pause_resume success! 
>>>>>> go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd table ddl_manager.finish_mark not exists for 41-th check, retry later [Fri Apr 26 19:29:01 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.32663268.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiDB... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a9c6b00018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:7147, start at 2024-04-26 19:29:00.372820594 +0800 CST m=+26.742168663 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:00.380 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:00.382 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:00.382 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a9c6b00018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:7147, start at 2024-04-26 19:29:00.372820594 +0800 CST m=+26.742168663 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:00.380 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:00.382 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:00.382 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a898440003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-llqb6-jpndj, pid:7235, start at 2024-04-26 19:28:40.979613704 +0800 CST m=+7.281089808 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:30:40.986 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:28:40.977 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:18:40.977 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
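Each "<<<<<< START cdc server in ... case >>>>>>" trace above follows the same pattern: launch cdc.test in server mode, then poll http://127.0.0.1:8300/debug/info up to 50 times until the response contains "etcd info". A condensed sketch of that loop, with flags copied from the kafka_big_messages trace and the coverage-profile name simplified; the real helper in the test utilities may differ in details:

    # Start the CDC server (backgrounded) and wait for /debug/info to report etcd info.
    cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.out server \
        --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log \
        --log-level debug \
        --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data \
        --cluster-id default &

    for i in $(seq 0 50); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info || true)
        if echo "$res" | grep -q 'etcd info'; then
            break
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server did not become ready" >&2
            exit 1
        fi
        sleep 3
    done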
Logging trace to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 1-th check, retry later ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 2692 0 --:--:-- --:--:-- --:--:-- 2695 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-04-26 19:28:48.585","puller_resolved_ts":"2024-04-26 19:28:42.585","last_synced_ts":"2024-04-26 19:26:33.235","now_ts":"2024-04-26 19:28:50.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:28:48.585","puller_resolved_ts":"2024-04-26' '19:28:42.585","last_synced_ts":"2024-04-26' '19:26:33.235","now_ts":"2024-04-26' '19:28:50.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true + '[' true '!=' true ']' + kill_pd ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo + info='jenkins 10045 7.2 0.0 13521680 143176 ? Sl 19:26 0:11 pd-server --advertise-client-urls http://127.0.0.1:2379 --client-urls http://0.0.0.0:2379 --advertise-peer-urls http://127.0.0.1:2380 --peer-urls http://0.0.0.0:2380 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/pd1.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/pd1 --name=pd1 --initial-cluster=pd1=http://127.0.0.1:2380 jenkins 10106 4.8 0.0 13849296 137096 ? 
Sl 19:26 0:07 pd-server --advertise-client-urls http://127.0.0.1:2479 --client-urls http://0.0.0.0:2479 --advertise-peer-urls http://127.0.0.1:2480 --peer-urls http://0.0.0.0:2480 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/down_pd.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/down_pd' ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo ++ awk '{print $2}' ++ xargs kill -9 + sleep 20 {"level":"warn","ts":1714130937.4770505,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00318e700/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714130937.4771123,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":1714130938.2961051,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002396e00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714130938.2961638,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-04-26T19:28:59.170149+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:28:59.171842+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:28:59.226182+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table mark.finish_mark_3 not exists for 47-th check, retry later table test.finish_mark not exists for 2-th check, retry later table common_1.v1 not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 42-th check, retry later [Fri Apr 26 19:29:03 CST 2024] <<<<<< START cdc server in multi_rocks case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + 
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.2006520067.out server --log-file /tmp/tidb_cdc_test/multi_rocks/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_rocks/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:29:03.936 +08:00] [INFO] [main.go:196] ["genLock done"] [2024/04/26 19:29:03.936 +08:00] [INFO] [pd_service_discovery.go:550] ["[pd] exit member loop due to context canceled"] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_dispatcher.go:214] ["exit tso requests cancel loop"] [2024/04/26 19:29:03.936 +08:00] [INFO] [resource_manager_client.go:295] ["[resource manager] exit resource token dispatcher"] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_dispatcher.go:268] ["exit tso dispatcher loop"] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_client.go:140] ["closing tso client"] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_dispatcher.go:455] ["[tso] stop fetching the pending tso requests due to context canceled"] [dc-location=global] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_dispatcher.go:380] ["[tso] exit tso dispatcher"] [dc-location=global] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_client.go:145] ["close tso client"] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/04/26 19:29:03.936 +08:00] [INFO] [tso_client.go:155] ["tso client is closed"] [2024/04/26 19:29:03.936 +08:00] [INFO] [pd_service_discovery.go:637] ["[pd] close pd service discovery client"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > table mark.finish_mark_3 exists table mark.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 3-th check, retry later < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:04 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e0a7c46-2040-45c3-bed0-d57319a98fe7 {"id":"2e0a7c46-2040-45c3-bed0-d57319a98fe7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130941} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a36fdce 2e0a7c46-2040-45c3-bed0-d57319a98fe7 /tidb/cdc/default/default/upstream/7362136273556394232 {"id":7362136273556394232,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e0a7c46-2040-45c3-bed0-d57319a98fe7 {"id":"2e0a7c46-2040-45c3-bed0-d57319a98fe7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130941} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a36fdce 2e0a7c46-2040-45c3-bed0-d57319a98fe7 /tidb/cdc/default/default/upstream/7362136273556394232 {"id":7362136273556394232,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e0a7c46-2040-45c3-bed0-d57319a98fe7 {"id":"2e0a7c46-2040-45c3-bed0-d57319a98fe7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130941} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a36fdce 2e0a7c46-2040-45c3-bed0-d57319a98fe7 /tidb/cdc/default/default/upstream/7362136273556394232 {"id":7362136273556394232,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 852695aa-cb2d-4b93-a7f3-d2a96c9b6566 Info: {"upstream_id":7362136273556394232,"namespace":"default","id":"852695aa-cb2d-4b93-a7f3-d2a96c9b6566","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-04-26T19:29:04.652448813+08:00","start_ts":449349141480341506,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349141480341506,"checkpoint_ts":449349141480341506,"checkpoint_time":"2024-04-26 19:29:01.316"} VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a9f4a40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:1340, start at 2024-04-26 19:29:03.306933555 +0800 CST m=+5.140975537 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:03.315 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:03.273 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:03.273 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a9f4a40017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:1340, start at 2024-04-26 19:29:03.306933555 +0800 CST m=+5.140975537 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:03.315 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:03.273 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:03.273 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68a9f5580011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:1421, start at 2024-04-26 19:29:03.332574211 +0800 CST m=+5.112224139 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:03.341 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:03.318 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:03.318 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
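The "Create changefeed successfully!" blocks above are emitted by the cdc CLI; the sink URI carries the Kafka topic plus the protocol, partition-num, kafka-version, and max-message-bytes options the rest of the case relies on (kafka_big_messages raises max-message-bytes to 12582912 to fit its oversized rows). A sketch of the equivalent standalone CLI call, using a plain cdc binary instead of the coverage-instrumented cdc.test wrapper seen in the trace:

    # Changefeed creation for the kafka_big_messages case; URI values copied from the log.
    cdc cli changefeed create \
        --pd=http://127.0.0.1:2379 \
        --sink-uri='kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912'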
Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Fri Apr 26 19:29:04 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 table common_1.v1 not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 43-th check, retry later {"level":"warn","ts":"2024-04-26T19:29:05.171453+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:05.172743+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:05.227889+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2817.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table common_1.v1 exists table common_1.recover_and_insert not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 44-th check, retry later =================>> Running test 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table mark.finish_mark exists check diff successfully table test.finish_mark not exists for 4-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:07 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f3b7c466-c3f2-4bab-9122-8f98236d6240 {"id":"f3b7c466-c3f2-4bab-9122-8f98236d6240","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29f18802 f3b7c466-c3f2-4bab-9122-8f98236d6240 /tidb/cdc/default/default/upstream/7362136202391972540 {"id":7362136202391972540,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f3b7c466-c3f2-4bab-9122-8f98236d6240 {"id":"f3b7c466-c3f2-4bab-9122-8f98236d6240","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29f18802 f3b7c466-c3f2-4bab-9122-8f98236d6240 /tidb/cdc/default/default/upstream/7362136202391972540 {"id":7362136202391972540,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f3b7c466-c3f2-4bab-9122-8f98236d6240 {"id":"f3b7c466-c3f2-4bab-9122-8f98236d6240","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130944} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a29f18802 f3b7c466-c3f2-4bab-9122-8f98236d6240 /tidb/cdc/default/default/upstream/7362136202391972540 {"id":7362136202391972540,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.cli.20346.out cli changefeed create 
'--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-rocks-test-29069?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' wait process cdc.test exit for 1-th time... Create changefeed successfully! ID: 5fca320e-a2a1-414b-bb2f-637389b26ff7 Info: {"upstream_id":7362136202391972540,"namespace":"default","id":"5fca320e-a2a1-414b-bb2f-637389b26ff7","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-rocks-test-29069?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:07.590018405+08:00","start_ts":449349143096721410,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349143096721410,"checkpoint_ts":449349143096721410,"checkpoint_time":"2024-04-26 19:29:07.482"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:29:08 CST 2024] <<<<<< run test case default_value success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68aa33980008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:4487, start at 2024-04-26 19:29:07.311729523 +0800 CST m=+5.211662821 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:07.318 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:07.302 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:07.302 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449349142923968513 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349142923968513 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "threadcount"="2" "insertproportion"="0" "readallfields"="true" "mysql.user"="root" "mysql.port"="4000" "mysql.db"="sink_retry" "requestdistribution"="uniform" "workload"="core" "readproportion"="0" "updateproportion"="0" "recordcount"="10" "mysql.host"="127.0.0.1" "operationcount"="0" "dotransactions"="false" "scanproportion"="0" ********************************************** Run finished, takes 13.347418ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1522.3, Avg(us): 2598, Min(us): 1219, Max(us): 6811, 95th(us): 7000, 99th(us): 7000 [Fri Apr 26 19:29:08 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.28692871.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table common_1.recover_and_insert not exists for 2-th check, retry later table test.finish_mark not exists for 5-th check, retry later + set +x [Fri Apr 26 19:29:09 CST 2024] <<<<<< START kafka consumer in multi_rocks case >>>>>> table ddl_manager.finish_mark not exists for 45-th check, retry later ***************** properties ***************** "updateproportion"="0" "table"="a1" "readallfields"="true" "mysql.port"="4000" "threadcount"="2" "operationcount"="0" "recordcount"="1000" "workload"="core" "mysql.db"="multi_rocks" "scanproportion"="0" "insertproportion"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "mysql.user"="root" "readproportion"="0" "dotransactions"="false" ********************************************** Run finished, takes 521.860483ms INSERT - Takes(s): 0.5, Count: 999, OPS: 1961.4, Avg(us): 1002, Min(us): 784, Max(us): 12414, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "workload"="core" "requestdistribution"="uniform" "mysql.db"="multi_rocks" "table"="a2" "operationcount"="0" "insertproportion"="0" "dotransactions"="false" "updateproportion"="0" "readproportion"="0" "readallfields"="true" "mysql.host"="127.0.0.1" "recordcount"="1000" "threadcount"="2" "mysql.user"="root" "scanproportion"="0" "mysql.port"="4000" ********************************************** start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68aa33980008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:4487, start at 2024-04-26 19:29:07.311729523 +0800 CST m=+5.211662821 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:07.318 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:07.302 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:07.302 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68aa34540004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-cdv81-9zlwn, pid:4571, start at 2024-04-26 19:29:07.352468518 +0800 CST m=+5.202649263 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:07.359 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:07.349 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:07.349 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
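The VARIABLE_NAME / VARIABLE_VALUE / COMMENT block above (and the identical blocks that follow for each cluster) is a dump of the mysql.tidb system table, printed while the harness verifies that a TiDB instance is up. A minimal sketch of the query behind it, assuming a plain mysql client against the upstream port 4000 used elsewhere in this log (the harness's own helper script is not shown here):

# Sketch only: reproduces the VARIABLE_NAME/VARIABLE_VALUE dump above.
# Host/port match the upstream TiDB used throughout this log; the real
# verification helper and its options are not part of this log.
mysql -h 127.0.0.1 -P 4000 -u root -e \
  'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'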
Logging trace to /tmp/tidb_cdc_test/cdc/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Run finished, takes 575.088778ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1790.0, Avg(us): 1118, Min(us): 808, Max(us): 16362, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "operationcount"="0" "mysql.port"="4000" "readproportion"="0" "workload"="core" "dotransactions"="false" "mysql.db"="multi_rocks" "updateproportion"="0" "requestdistribution"="uniform" "readallfields"="true" "threadcount"="2" "recordcount"="1000" "scanproportion"="0" "mysql.user"="root" "insertproportion"="0" "mysql.host"="127.0.0.1" "table"="a3" ********************************************** table test.finish_mark not exists for 6-th check, retry later ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed Run finished, takes 518.773437ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 1990.8, Avg(us): 1006, Min(us): 760, Max(us): 16402, 95th(us): 2000, 99th(us): 2000 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:11.173476+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:11.174113+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} table ddl_manager.finish_mark not exists for 46-th check, retry later table common_1.recover_and_insert not exists for 3-th check, retry later ***************** properties ***************** "table"="a4" "readproportion"="0" "mysql.host"="127.0.0.1" "mysql.user"="root" "threadcount"="2" "insertproportion"="0" "mysql.port"="4000" "mysql.db"="multi_rocks" "recordcount"="1000" "readallfields"="true" 
"scanproportion"="0" "updateproportion"="0" "requestdistribution"="uniform" "operationcount"="0" "workload"="core" "dotransactions"="false" ********************************************** {"level":"warn","ts":"2024-04-26T19:29:11.228556+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table kafka_big_messages.test exists check diff failed 1-th time, retry later Run finished, takes 549.035662ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 1879.9, Avg(us): 1067, Min(us): 789, Max(us): 17005, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "mysql.db"="multi_rocks" "readproportion"="0" "requestdistribution"="uniform" "threadcount"="2" "mysql.port"="4000" "table"="a5" "readallfields"="true" "updateproportion"="0" "recordcount"="1000" "operationcount"="0" "scanproportion"="0" "mysql.user"="root" "workload"="core" "mysql.host"="127.0.0.1" "insertproportion"="0" "dotransactions"="false" ********************************************** + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:11 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e7299cce-9525-4ba2-bb44-a66c274f2c19 {"id":"e7299cce-9525-4ba2-bb44-a66c274f2c19","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130948} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a5934e8 e7299cce-9525-4ba2-bb44-a66c274f2c19 /tidb/cdc/default/default/upstream/7362136308920571670 {"id":7362136308920571670,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e7299cce-9525-4ba2-bb44-a66c274f2c19 {"id":"e7299cce-9525-4ba2-bb44-a66c274f2c19","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130948} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a5934e8 e7299cce-9525-4ba2-bb44-a66c274f2c19 /tidb/cdc/default/default/upstream/7362136308920571670 {"id":7362136308920571670,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/e7299cce-9525-4ba2-bb44-a66c274f2c19 {"id":"e7299cce-9525-4ba2-bb44-a66c274f2c19","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130948} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a5934e8 e7299cce-9525-4ba2-bb44-a66c274f2c19 /tidb/cdc/default/default/upstream/7362136308920571670 {"id":7362136308920571670,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2924.out cli changefeed create --start-ts=449349142923968513 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-6241?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: f79d506c-dfbf-41ed-a6bb-d604bd7cbc6c Info: {"upstream_id":7362136308920571670,"namespace":"default","id":"f79d506c-dfbf-41ed-a6bb-d604bd7cbc6c","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-6241?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:12.076532311+08:00","start_ts":449349142923968513,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349142923968513,"checkpoint_ts":449349142923968513,"checkpoint_time":"2024-04-26 19:29:06.823"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
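The sink_retry changefeed above is created with the cdc cli against a Kafka sink. Stripped of the coverage wrapper (cdc.test -test.coverprofile=...), the invocation reduces to roughly the following; the start TS and the random topic suffix are generated per run, so the values below are placeholders copied from this log, not fixed parameters:

# Sketch of the changefeed creation traced above (sink_retry case).
START_TS=449349142923968513   # the TSO captured earlier in this log
cdc cli changefeed create \
  --start-ts=${START_TS} \
  --sink-uri='kafka://127.0.0.1:9092/ticdc-sink-retry-test-6241?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'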
[Fri Apr 26 19:29:12 CST 2024] <<<<<< START cdc server in cdc case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.59485950.out server --log-file /tmp/tidb_cdc_test/cdc/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info Run finished, takes 574.207548ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1789.2, Avg(us): 1116, Min(us): 762, Max(us): 15206, 95th(us): 2000, 99th(us): 2000 * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.finish_mark not exists for 7-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table multi_rocks.finish_mark not exists for 1-th check, retry later table common_1.recover_and_insert exists table common_1.finish_mark not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 47-th check, retry later + set +x [Fri Apr 26 19:29:13 CST 2024] <<<<<< START kafka consumer in sink_retry case >>>>>> check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table common_1.finish_mark not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 48-th check, retry later table test.finish_mark not exists for 8-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/cli Starting Upstream PD... table multi_rocks.finish_mark not exists for 2-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32af9517-b986-4f6b-afca-d1525d91b509 {"id":"32af9517-b986-4f6b-afca-d1525d91b509","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130952} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a60becb 32af9517-b986-4f6b-afca-d1525d91b509 /tidb/cdc/default/default/upstream/7362136322187151810 {"id":7362136322187151810,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32af9517-b986-4f6b-afca-d1525d91b509 {"id":"32af9517-b986-4f6b-afca-d1525d91b509","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130952} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a60becb 32af9517-b986-4f6b-afca-d1525d91b509 /tidb/cdc/default/default/upstream/7362136322187151810 {"id":7362136322187151810,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32af9517-b986-4f6b-afca-d1525d91b509 {"id":"32af9517-b986-4f6b-afca-d1525d91b509","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130952} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a60becb 32af9517-b986-4f6b-afca-d1525d91b509 
/tidb/cdc/default/default/upstream/7362136322187151810 {"id":7362136322187151810,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.cli.5998.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cdc-test-3373?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/cdc/pulsar_test.toml check diff successfully Create changefeed successfully! ID: 3108cd1a-5346-4055-b610-235b68d47cd2 Info: {"upstream_id":7362136322187151810,"namespace":"default","id":"3108cd1a-5346-4055-b610-235b68d47cd2","sink_uri":"kafka://127.0.0.1:9092/ticdc-cdc-test-3373?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:15.997587746+08:00","start_ts":449349145291128837,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349145291128837,"checkpoint_ts":449349145291128837,"checkpoint_time":"2024-04-26 19:29:15.853"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... table test.t2 not exists for 1-th check, retry later cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:29:16 CST 2024] <<<<<< run test case kafka_big_messages success! 
>>>>>> table common_1.finish_mark not exists for 3-th check, retry later table ddl_manager.finish_mark not exists for 49-th check, retry later table test.finish_mark not exists for 9-th check, retry later 0 0 0 0 0 0 0 0 --:--:-- 0:00:01 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:02 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:03 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:04 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:05 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:06 --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:17.174624+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:17.175477+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:17.230013+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table multi_rocks.finish_mark not exists for 3-th check, retry later + set +x [Fri Apr 26 19:29:17 CST 2024] <<<<<< START kafka consumer in cdc case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 table test.t2 not exists for 2-th check, retry later go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading golang.org/x/text v0.14.0 table common_1.finish_mark not exists for 4-th check, retry later table test.finish_mark not exists for 10-th check, retry later table multi_rocks.finish_mark not exists for 4-th check, retry later 0 0 
0 0 0 0 0 0 --:--:-- 0:00:07 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:08 --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:19.161151+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":"2024-04-26T19:29:19.161213+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-04-26T19:29:19.162949+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":"2024-04-26T19:29:19.163004+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-04-26T19:29:19.218153+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":"2024-04-26T19:29:19.218202+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} table ddl_manager.finish_mark not exists for 50-th check, retry later table test.t2 not exists for 3-th check, retry later go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/docker/go-units v0.5.0 go: 
downloading golang.org/x/sync v0.7.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/google/btree v1.1.2 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dgryski/go-farm 
v0.0.0-20200201041132-a6ae2369ad13 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/simple/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
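Each "=================>> Running test .../run.sh using Sink-Type: kafka..." banner marks the harness starting one integration-test script under tests/integration_tests/. Roughly, and with the exact calling convention being an assumption not visible in this log, the "simple" case announced above amounts to:

# Sketch only: how one case is launched; whether the sink type is passed as an
# argument or an environment variable is not shown in this log.
cd tiflow/tests/integration_tests/simple
bash run.sh kafka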
go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 table test.finish_mark not exists for 11-th check, retry later table multi_rocks.finish_mark exists check diff successfully table common_1.finish_mark exists check diff successfully table ddl_manager.finish_mark not exists for 51-th check, retry later table test.t2 not exists for 4-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ab07d00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:5876, start at 2024-04-26 19:29:20.92125597 +0800 CST m=+5.288028787 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:20.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:20.934 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:20.934 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... table test.finish_mark exists cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:29:22 CST 2024] <<<<<< run test case multi_rocks success! >>>>>> cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:29:22 CST 2024] <<<<<< run test case common_1 success! 
>>>>>> check diff successfully table ddl_manager.finish_mark not exists for 52-th check, retry later 0 0 0 0 0 0 0 0 --:--:-- 0:00:09 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:10 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:11 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:12 --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:23.176314+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:23.17651+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:23.230731+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} start tidb cluster in /tmp/tidb_cdc_test/simple Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/felixge/httpsnoop v1.0.4 table test.t2 not exists for 5-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ab07d00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:5876, start at 2024-04-26 19:29:20.92125597 +0800 CST m=+5.288028787 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:20.928 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:20.934 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:20.934 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ab07a40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:5964, start at 2024-04-26 19:29:20.915990512 +0800 CST m=+5.222619434 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:20.925 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:20.923 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:20.923 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
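The recurring "table <db>.<table> not exists for N-th check, retry later" lines come from a polling helper that waits for a table to appear downstream before the test proceeds. A minimal stand-in for that check, assuming a mysql client probe and an arbitrary retry budget (the real helper and its limits are not shown in this log):

# Hypothetical helper, for illustration only.
check_table_exists() {
  local table=$1 host=$2 port=$3
  for i in $(seq 1 60); do
    if mysql -h "$host" -P "$port" -u root -e "DESC ${table};" >/dev/null 2>&1; then
      echo "table ${table} exists"
      return 0
    fi
    echo "table ${table} not exists for ${i}-th check, retry later"
    sleep 2
  done
  return 1
}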
Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd 0 0 0 0 0 0 0 0 --:--:-- 0:00:13 --:--:-- 0{"level":"warn","ts":1714130964.4871686,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002f13180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":1714130964.4872127,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} table ddl_manager.finish_mark not exists for 53-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.t2 not exists for 6-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Fri Apr 26 19:29:26 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.74497451.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 54-th check, retry later table test.t2 not exists for 7-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
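Each "<<<<<< START cdc server in <case> case >>>>>>" banner is followed by a shell trace like the one above: the coverage-instrumented binary cdc.test is launched in server mode, with GO_FAILPOINTS optionally injecting failpoints (the sink_retry case sets MySQLSinkTxnRandomError=25%return(true); this case leaves it empty). Condensed, with the per-run numeric suffix on the coverage file treated as a placeholder:

# Sketch of the server launch traced above for the multi_tables_ddl_v2 case.
WORK_DIR=/tmp/tidb_cdc_test/multi_tables_ddl_v2
GO_FAILPOINTS='' \
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.out server \
  --log-file ${WORK_DIR}/cdc.log \
  --log-level debug \
  --data-dir ${WORK_DIR}/cdc_data \
  --cluster-id default &   # backgrounding is implied; the trace then polls /debug/info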
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 0 0 0 0 0 0 0 0 --:--:-- 0:00:14 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:15 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:16 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:17 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:18 --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:29.177525+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:29.177604+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:29.231413+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table ddl_manager.finish_mark not exists for 55-th check, retry later table test.t2 not exists for 8-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:29 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de8d1483-9508-4d82-9494-3c1da1c97e2d {"id":"de8d1483-9508-4d82-9494-3c1da1c97e2d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130966} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a9598cc de8d1483-9508-4d82-9494-3c1da1c97e2d /tidb/cdc/default/default/upstream/7362136382432774099 {"id":7362136382432774099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de8d1483-9508-4d82-9494-3c1da1c97e2d {"id":"de8d1483-9508-4d82-9494-3c1da1c97e2d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130966} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a9598cc de8d1483-9508-4d82-9494-3c1da1c97e2d /tidb/cdc/default/default/upstream/7362136382432774099 {"id":7362136382432774099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/de8d1483-9508-4d82-9494-3c1da1c97e2d {"id":"de8d1483-9508-4d82-9494-3c1da1c97e2d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130966} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2a9598cc de8d1483-9508-4d82-9494-3c1da1c97e2d /tidb/cdc/default/default/upstream/7362136382432774099 {"id":7362136382432774099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: test-normal Info: {"upstream_id":7362136382432774099,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-21937?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:29.358354948+08:00","start_ts":449349147960016897,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349147960016897,"checkpoint_ts":449349147960016897,"checkpoint_time":"2024-04-26 19:29:26.034"} Create changefeed successfully! 
ID: test-error-1 Info: {"upstream_id":7362136382432774099,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-30113?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:29.549770877+08:00","start_ts":449349147960016897,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349147960016897,"checkpoint_ts":449349147960016897,"checkpoint_time":"2024-04-26 19:29:26.034"} Create changefeed successfully! 
ID: test-error-2 Info: {"upstream_id":7362136382432774099,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-20625?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:29.75024672+08:00","start_ts":449349147960016897,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349147960016897,"checkpoint_ts":449349147960016897,"checkpoint_time":"2024-04-26 19:29:26.034"} [Fri Apr 26 19:29:29 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Fri Apr 26 19:29:29 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Fri Apr 26 19:29:29 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 56-th check, retry later table test.t2 not exists for 9-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/kafka_compression Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
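The interleaved "table ... not exists for N-th check, retry later" messages above are emitted while the harness polls the downstream database until a replicated table appears. A minimal bash sketch of that kind of wait loop, assuming a MySQL-compatible downstream; the helper name, port and retry budget here are illustrative, not the harness's actual code:

check_table_exists_sketch() {
    local table=$1 host=${2:-127.0.0.1} port=${3:-3306} max_retry=${4:-60}
    local i
    for ((i = 1; i <= max_retry; i++)); do
        # DESC fails until the changefeed has created/replicated the table downstream.
        if mysql -h "$host" -P "$port" -u root -e "DESC $table;" >/dev/null 2>&1; then
            echo "table $table exists"
            return 0
        fi
        echo "table $table not exists for $i-th check, retry later"
        sleep 2
    done
    echo "table $table still not exists after $max_retry checks"
    return 1
}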
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) 0 0 0 0 0 0 0 0 --:--:-- 0:00:19 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:20 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:21 --:--:-- 0{"level":"warn","ts":1714130972.477891,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00318e700/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714130972.4779437,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abb2ec0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:11324, start at 2024-04-26 19:29:31.867137412 +0800 CST m=+5.225480993 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:31.875 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:31.835 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:31.835 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 57-th check, retry later table multi_tables_ddl_test.t55 not exists for 1-th check, retry later 0 0 0 0 0 0 0 0 --:--:-- 0:00:22 --:--:-- 0{"level":"warn","ts":1714130973.2975416,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002396e00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714130973.2975967,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} table test.t2 not exists for 10-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abc9d40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:21151, start at 2024-04-26 19:29:33.347738079 +0800 CST m=+5.257988840 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:33.355 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:33.351 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:33.351 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abb2ec0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:11324, start at 2024-04-26 19:29:31.867137412 +0800 CST m=+5.225480993 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:31.875 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:31.835 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:31.835 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abb2bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:11407, start at 2024-04-26 19:29:31.855759657 +0800 CST m=+5.161565181 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:31.864 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:31.823 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:31.823 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/cli/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cli/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cli/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cli/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cli/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) table test.finish_mark not exists for 1-th check, retry later [Pipeline] // cache \033[0;36m<<< Run all test success >>>\033[0m table ddl_manager.finish_mark not exists for 58-th check, retry later table multi_tables_ddl_test.t55 not exists for 2-th check, retry later [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // dir [Pipeline] } [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) 0 0 0 0 0 0 0 0 --:--:-- 0:00:23 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:24 --:--:-- 0{"level":"warn","ts":"2024-04-26T19:29:35.178089+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:35.178372+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:35.231826+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} [Pipeline] // withCredentials [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] // stage table test.t2 not exists for 11-th check, retry later [Pipeline] // withCredentials [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // stage [Pipeline] } [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abc9d40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:21151, start at 2024-04-26 19:29:33.347738079 +0800 CST m=+5.257988840 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:33.355 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:29:33.351 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:33.351 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68abca880016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:21231, start at 2024-04-26 19:29:33.388920005 +0800 CST m=+5.244666369 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:33.395 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:33.396 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:33.396 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
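The ERROR 2003 retries followed by a VARIABLE_NAME / tikv_gc_* dump above are the usual signature of the cluster start-up probe: the scripts keep querying the freshly started TiDB until the connection succeeds, and the dump is the contents of mysql.tidb printed on the first successful attempt. A sketch of such a probe, assuming the upstream TiDB listens on port 4000; the exact query, port and timeout used by the scripts may differ:

# Retry until TiDB accepts connections; each failed attempt prints ERROR 2003,
# and the mysql.tidb rows (bootstrapped, tikv_gc_*, ...) are printed once it succeeds.
i=0
until mysql -u root -h 127.0.0.1 -P 4000 -e "SELECT * FROM mysql.tidb;"; do
    i=$((i + 1))
    if [ "$i" -gt 60 ]; then
        echo "upstream TiDB failed to start in time"
        exit 1
    fi
    sleep 1
done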
[Pipeline] // node table test.finish_mark not exists for 2-th check, retry later [Pipeline] // container [Pipeline] } [Pipeline] } Logging trace to /tmp/tidb_cdc_test/simple/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/simple/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/simple/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/simple/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/simple/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // podTemplate [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // node [Pipeline] } [Pipeline] } table ddl_manager.finish_mark not exists for 59-th check, retry later table multi_tables_ddl_test.t55 not exists for 3-th check, retry later [Pipeline] // stage [Pipeline] // podTemplate Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.12806.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] } table test.t2 not exists for 12-th check, retry later table test.finish_mark not exists for 3-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.22543.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449349150909399041 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349150909399041 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x table multi_tables_ddl_test.t55 not exists for 4-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:29:38 CST 2024] <<<<<< START cdc server in cli case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.1285512857.out server --log-file /tmp/tidb_cdc_test/cli/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cli/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 60-th check, retry later table test.t2 exists check diff successfully table test.finish_mark not exists for 4-th check, retry later wait process cdc.test exit for 1-th time... + set +x + tso='449349151306809345 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349151306809345 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:29:40 CST 2024] <<<<<< START cdc server in simple case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.2258422586.out server --log-file /tmp/tidb_cdc_test/simple/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/simple/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 0 0 0 0 0 0 0 0 --:--:-- 0:00:25 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:26 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:27 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:28 --:--:-- 0 0 0 0 0 0 0 0 0 --:--:-- 0:00:29 --:--:-- 0 100 135 100 135 0 0 4 0 0:00:33 0:00:30 0:00:03 27 100 135 100 135 0 0 4 0 0:00:33 0:00:30 0:00:03 34 + synced_status='{ "error_msg": "[CDC:ErrPDEtcdAPIError]etcd api call error: context deadline exceeded", "error_code": "CDC:ErrPDEtcdAPIError" }' ++ echo '{' '"error_msg":' '"[CDC:ErrPDEtcdAPIError]etcd' api call error: context deadline 'exceeded",' '"error_code":' '"CDC:ErrPDEtcdAPIError"' '}' ++ jq -r .error_code + error_code=CDC:ErrPDEtcdAPIError + cleanup_process cdc.test table multi_tables_ddl_test.t55 exists table multi_tables_ddl_test.t66 exists table multi_tables_ddl_test.t7 exists ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... table multi_tables_ddl_test.t88 exists table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 3-th time... 
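The tso query traces above show how a changefeed's --start-ts is derived: cli tso query is run against PD, and awk keeps only the first space-separated field because the coverage-instrumented cdc.test binary appends a "PASS coverage: ..." summary to its output. A condensed sketch of that sequence and the changefeed creation it feeds; the binary is shortened to cdc, the sink URI is abbreviated from the full kafka:// URIs elsewhere in this log, and the changefeed name is an example:

# Ask PD for a current TSO; the raw output may carry a trailing coverage summary.
tso=$(cdc cli tso query --pd=http://127.0.0.1:2379)
# Unquoted echo flattens the output onto one line; awk keeps the TSO itself.
start_ts=$(echo $tso | awk -F ' ' '{print $1}')

# Create a changefeed that starts replicating from that TSO into Kafka.
cdc cli changefeed create \
    --start-ts="${start_ts}" \
    --sink-uri="kafka://127.0.0.1:9092/ticdc-example?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760" \
    -c=example-changefeed-name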
{"level":"warn","ts":"2024-04-26T19:29:41.179778+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001244000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-04-26T19:29:41.180049+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f941c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} wait process cdc.test exit for 1-th time... {"level":"warn","ts":"2024-04-26T19:29:41.23281+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001135500/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table ddl_manager.finish_mark not exists for 61-th check, retry later check_changefeed_state http://127.0.0.1:2379 5373ff40-c1dc-4f99-b817-bec8573cb7c6 finished null + endpoints=http://127.0.0.1:2379 + changefeed_id=5373ff40-c1dc-4f99-b817-bec8573cb7c6 + expected_state=finished + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 5373ff40-c1dc-4f99-b817-bec8573cb7c6 -s + info='{ "upstream_id": 7362135986388721498, "namespace": "default", "id": "5373ff40-c1dc-4f99-b817-bec8573cb7c6", "state": "finished", "checkpoint_tso": 449349148778430469, "checkpoint_time": "2024-04-26 19:29:29.156", "error": null }' + echo '{ "upstream_id": 7362135986388721498, "namespace": "default", "id": "5373ff40-c1dc-4f99-b817-bec8573cb7c6", "state": "finished", "checkpoint_tso": 449349148778430469, "checkpoint_time": "2024-04-26 19:29:29.156", "error": null }' { "upstream_id": 7362135986388721498, "namespace": "default", "id": "5373ff40-c1dc-4f99-b817-bec8573cb7c6", "state": "finished", "checkpoint_tso": 449349148778430469, "checkpoint_time": "2024-04-26 19:29:29.156", "error": null } ++ echo '{' '"upstream_id":' 7362135986388721498, '"namespace":' '"default",' '"id":' '"5373ff40-c1dc-4f99-b817-bec8573cb7c6",' '"state":' '"finished",' '"checkpoint_tso":' 449349148778430469, '"checkpoint_time":' '"2024-04-26' '19:29:29.156",' '"error":' null '}' ++ jq -r .state + state=finished + [[ ! finished == \f\i\n\i\s\h\e\d ]] ++ echo '{' '"upstream_id":' 7362135986388721498, '"namespace":' '"default",' '"id":' '"5373ff40-c1dc-4f99-b817-bec8573cb7c6",' '"state":' '"finished",' '"checkpoint_tso":' 449349148778430469, '"checkpoint_time":' '"2024-04-26' '19:29:29.156",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:29:37 CST 2024] <<<<<< run test case changefeed_finish success! 
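The check_changefeed_state trace above (and the later checks for test-normal, test-error-1, test-error-2 and custom-changefeed-name) all follow one pattern: query the changefeed summary through the CLI, then compare its .state and .error.message fields with jq. A compact sketch of that check; the function name is illustrative, and the real helper's TLS handling and retry logic are omitted:

check_changefeed_state_sketch() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=${4:-null}
    local info state message
    # -s asks the CLI for the short summary JSON seen in the traces above.
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ "$state" != "$expected_state" ]]; then
        echo "unexpected state: $state (want $expected_state)"
        return 1
    fi
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "unexpected error message: $message (want match for $expected_error)"
        return 1
    fi
    echo "run task successfully"
}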
>>>>>> cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:29:41 CST 2024] <<<<<< run test case resolve_lock success! >>>>>> wait process cdc.test exit for 2-th time... table test.finish_mark exists check diff successfully cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit + stop_tidb_cluster + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:42 GMT < Content-Length: 859 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ee1eb730-35f5-40cc-a320-3855533569e3 {"id":"ee1eb730-35f5-40cc-a320-3855533569e3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130979} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ab4d63e ee1eb730-35f5-40cc-a320-3855533569e3 /tidb/cdc/default/default/upstream/7362136428262768517 {"id":7362136428262768517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ee1eb730-35f5-40cc-a320-3855533569e3 {"id":"ee1eb730-35f5-40cc-a320-3855533569e3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130979} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ab4d63e ee1eb730-35f5-40cc-a320-3855533569e3 /tidb/cdc/default/default/upstream/7362136428262768517 {"id":7362136428262768517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ee1eb730-35f5-40cc-a320-3855533569e3 {"id":"ee1eb730-35f5-40cc-a320-3855533569e3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130979} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ab4d63e ee1eb730-35f5-40cc-a320-3855533569e3 /tidb/cdc/default/default/upstream/7362136428262768517 {"id":7362136428262768517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test 
-test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.12917.out cli changefeed create --start-ts=449349150909399041 '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name [WARN] --tz is deprecated in changefeed settings. Create changefeed successfully! ID: custom-changefeed-name Info: {"upstream_id":7362136428262768517,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:42.492432316+08:00","start_ts":449349150909399041,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349150909399041,"checkpoint_ts":449349150909399041,"checkpoint_time":"2024-04-26 19:29:37.285"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ac4fa40018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:5578, start at 2024-04-26 19:29:41.912194964 +0800 CST m=+5.198346041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:41.918 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:41.916 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:41.916 +0800 All versions after safe point can be accessed. (DO NOT EDIT) wait process cdc.test exit for 1-th time... table multi_tables_ddl_test.finish_mark not exists for 2-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ac4fa40018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:5578, start at 2024-04-26 19:29:41.912194964 +0800 CST m=+5.198346041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:41.918 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:41.916 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:41.916 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ac52b00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:5663, start at 2024-04-26 19:29:42.090753833 +0800 CST m=+5.313120398 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:31:42.099 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:29:42.060 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:19:42.060 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 62-th check, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:29:43 CST 2024] <<<<<< run test case kafka_simple_basic_avro success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:43 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9b53062d-a7d8-4725-ace3-488bbccbc05d {"id":"9b53062d-a7d8-4725-ace3-488bbccbc05d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130980} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2acc29ef 9b53062d-a7d8-4725-ace3-488bbccbc05d /tidb/cdc/default/default/upstream/7362136433783714521 {"id":7362136433783714521,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9b53062d-a7d8-4725-ace3-488bbccbc05d {"id":"9b53062d-a7d8-4725-ace3-488bbccbc05d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130980} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2acc29ef 9b53062d-a7d8-4725-ace3-488bbccbc05d /tidb/cdc/default/default/upstream/7362136433783714521 {"id":7362136433783714521,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9b53062d-a7d8-4725-ace3-488bbccbc05d {"id":"9b53062d-a7d8-4725-ace3-488bbccbc05d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130980} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2acc29ef 9b53062d-a7d8-4725-ace3-488bbccbc05d /tidb/cdc/default/default/upstream/7362136433783714521 {"id":7362136433783714521,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.22639.out cli changefeed create --start-ts=449349151306809345 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-simple-test-4290?protocol=open-protocol&partition-num=4&kafka-client-id=cdc_test_simple&kafka-version=2.4.1&max-message-bytes=10485760' + set +x [Fri Apr 26 19:29:43 CST 2024] <<<<<< START kafka consumer in cli case >>>>>> table test.simple not exists for 1-th check, retry later Create changefeed successfully! 
ID: aae644e8-736e-4e67-b446-3e436513a4cf Info: {"upstream_id":7362136433783714521,"namespace":"default","id":"aae644e8-736e-4e67-b446-3e436513a4cf","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-simple-test-4290?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=cdc_test_simple\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:44.027199035+08:00","start_ts":449349151306809345,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349151306809345,"checkpoint_ts":449349151306809345,"checkpoint_time":"2024-04-26 19:29:38.801"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table multi_tables_ddl_test.finish_mark exists check table exists success + endpoints=http://127.0.0.1:2379 + changefeed_id=test-normal + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s table ddl_manager.finish_mark not exists for 63-th check, retry later + info='{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449349149703274535, "checkpoint_time": "2024-04-26 19:29:32.684", "error": null }' + echo '{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449349149703274535, "checkpoint_time": "2024-04-26 19:29:32.684", "error": null }' { "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449349149703274535, "checkpoint_time": "2024-04-26 19:29:32.684", "error": null } ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449349149703274535, '"checkpoint_time":' '"2024-04-26' '19:29:32.684",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! 
normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449349149703274535, '"checkpoint_time":' '"2024-04-26' '19:29:32.684",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-1 + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s + set +x [Fri Apr 26 19:29:45 CST 2024] <<<<<< START kafka consumer in simple case >>>>>> succeed to verify meta placement rules [Fri Apr 26 19:29:45 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.70717073.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + info='{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449349152849002499, "checkpoint_time": "2024-04-26 19:29:44.684", "error": null }' + echo '{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449349152849002499, "checkpoint_time": "2024-04-26 19:29:44.684", "error": null }' { "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449349152849002499, "checkpoint_time": "2024-04-26 19:29:44.684", "error": null } ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449349152849002499, '"checkpoint_time":' '"2024-04-26' '19:29:44.684",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449349152849002499, '"checkpoint_time":' '"2024-04-26' '19:29:44.684",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-2 + expected_state=failed + error_msg=ErrSyncRenameTableFailed + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist check data failed 1-th time, retry later + info='{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449349149244522506, "checkpoint_time": "2024-04-26 19:29:30.934", "error": { "time": "2024-04-26T19:29:32.743947781+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' + echo '{ "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449349149244522506, "checkpoint_time": "2024-04-26 19:29:30.934", "error": { "time": "2024-04-26T19:29:32.743947781+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' { "upstream_id": 7362136382432774099, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449349149244522506, "checkpoint_time": "2024-04-26 19:29:30.934", "error": { "time": "2024-04-26T19:29:32.743947781+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449349149244522506, '"checkpoint_time":' '"2024-04-26' '19:29:30.934",' '"error":' '{' '"time":' '"2024-04-26T19:29:32.743947781+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + state=failed + [[ ! 
failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7362136382432774099, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449349149244522506, '"checkpoint_time":' '"2024-04-26' '19:29:30.934",' '"error":' '{' '"time":' '"2024-04-26T19:29:32.743947781+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.' + [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]] check diff successfully table sink_retry.finish_mark_1 exists check diff successfully ***************** properties ***************** "dotransactions"="false" "operationcount"="0" "scanproportion"="0" "mysql.db"="sink_retry" "mysql.user"="root" "updateproportion"="0" "readallfields"="true" "recordcount"="10" "insertproportion"="0" "readproportion"="0" "mysql.host"="127.0.0.1" "threadcount"="2" "mysql.port"="4000" "requestdistribution"="uniform" "workload"="core" ********************************************** Run finished, takes 4.222273ms INSERT - Takes(s): 0.0, Count: 10, OPS: 3228.2, Avg(us): 735, Min(us): 492, Max(us): 1696, 95th(us): 2000, 99th(us): 2000 wait process cdc.test exit for 1-th time... table test.simple not exists for 2-th check, retry later cdc.test: no process found wait process cdc.test exit for 2-th time... process cdc.test already exit [Fri Apr 26 19:29:46 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! >>>>>> table ddl_manager.finish_mark not exists for 64-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist check data failed 2-th time, retry later < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:29:48 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a9870eb-8522-483e-908e-4171469b6978 {"id":"6a9870eb-8522-483e-908e-4171469b6978","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130985} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2aebdbc3 6a9870eb-8522-483e-908e-4171469b6978 /tidb/cdc/default/default/upstream/7362136474245782833 {"id":7362136474245782833,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a9870eb-8522-483e-908e-4171469b6978 {"id":"6a9870eb-8522-483e-908e-4171469b6978","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130985} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2aebdbc3 6a9870eb-8522-483e-908e-4171469b6978 /tidb/cdc/default/default/upstream/7362136474245782833 {"id":7362136474245782833,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6a9870eb-8522-483e-908e-4171469b6978 {"id":"6a9870eb-8522-483e-908e-4171469b6978","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714130985} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2aebdbc3 6a9870eb-8522-483e-908e-4171469b6978 /tidb/cdc/default/default/upstream/7362136474245782833 {"id":7362136474245782833,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7125.out cli tso query --pd=http://127.0.0.1:2379 table test.simple exists table test.`simple-dash` exists + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7362136428262768517, "namespace": 
"default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349153635696647, "checkpoint_time": "2024-04-26 19:29:47.685", "error": null }' + echo '{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349153635696647, "checkpoint_time": "2024-04-26 19:29:47.685", "error": null }' { "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349153635696647, "checkpoint_time": "2024-04-26 19:29:47.685", "error": null } ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349153635696647, '"checkpoint_time":' '"2024-04-26' '19:29:47.685",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349153635696647, '"checkpoint_time":' '"2024-04-26' '19:29:47.685",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379 changefeed count 1 check pass, pd_addr: http://127.0.0.1:2679 changefeed count 1 check pass, pd_addr: http://127.0.0.1:2779 changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779 table ddl_manager.finish_mark not exists for 65-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Error: [CDC:ErrChangefeedUpdateRefused]changefeed update error: can only update changefeed config when it is stopped or failed update changefeed config should fail when changefeed is running, got Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc003e1d760} {Type:update Path:[Config SyncPointRetention] From: To:0xc003e1d768} {Type:update Path:[Config Consistent] From: To:0xc00123a460} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13331.out cli changefeed --changefeed-id custom-changefeed-name pause TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1' ____________________________________ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ check data failed 3-th time, retry later + set +x + tso='449349153840431108 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349153840431108 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7163.out cli changefeed create --start-ts=449349153840431108 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip Create changefeed successfully! 
ID: gzip Info: {"upstream_id":7362136474245782833,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-04-26T19:29:50.396736578+08:00","start_ts":449349153840431108,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349153840431108,"checkpoint_ts":449349153840431108,"checkpoint_time":"2024-04-26 19:29:48.466"} PASS PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... coverage: 2.4% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 66-th check, retry later + set +x check data successfully + set +x [Fri Apr 26 19:29:51 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> + run_case_with_unavailable_tikv conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory The 1 times to try to start tidb cluster... shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory start tidb cluster in /tmp/tidb_cdc_test/force_replicate_table Starting Upstream PD... 
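The gzip changefeed just created follows the per-algorithm setup that kafka_compression repeats later for snappy: fetch a start TSO from PD, then create a changefeed whose Kafka sink URI selects the compression codec. A minimal sketch assembled from the commands in the trace (topic name and changefeed id taken from the log, coverage flags omitted):

  # Per-algorithm setup in kafka_compression, gzip shown.
  algorithm=gzip
  pd_addr=http://127.0.0.1:2379

  # tso query also prints PASS/coverage lines, so keep only the first field of the first line
  start_ts=$(cdc cli tso query --pd="$pd_addr" | head -n1 | awk '{print $1}')

  cdc cli changefeed create -c "$algorithm" --start-ts="$start_ts" \
    --sink-uri="kafka://127.0.0.1:9092/ticdc-kafka-compression-${algorithm}-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=${algorithm}"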
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... wait process cdc.test exit for 1-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/04/26 19:29:50.353 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.391 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.511 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.522 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:51.487 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:51.497 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/04/26 19:29:50.353 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.391 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.511 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:50.522 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:51.487 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/04/26 19:29:51.497 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]") table test.gzip_finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:29:53 CST 2024] <<<<<< run test case simple success! >>>>>> table ddl_manager.finish_mark not exists for 67-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/move_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... 
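The "[[: ... syntax error: operand expected" message from kafka_compression/run.sh line 22 above is a shell quirk rather than a replication failure (the case still reaches test.gzip_finish_mark and passes check diff): bash is handed multi-line grep output where an arithmetic [[ ]] comparison expects a single number. A hedged sketch of a more robust form of that check, since run.sh itself is not shown in this log; the log path and variable names are assumptions:

  # Count matching producer log lines with grep -c, which always yields one integer.
  algorithm=gzip
  cdc_log=/tmp/tidb_cdc_test/kafka_compression/cdc.log   # illustrative path

  # grep -c exits non-zero when there are no matches, so keep the pipeline alive
  matches=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" "$cdc_log" || true)
  if [[ "$matches" -lt 1 ]]; then
    echo "cdc log does not show the ${algorithm} compression algorithm in use"
    exit 1
  fi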
+ endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=stopped + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449349154160246789, "checkpoint_time": "2024-04-26 19:29:49.686", "error": null }' + echo '{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449349154160246789, "checkpoint_time": "2024-04-26 19:29:49.686", "error": null }' { "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "stopped", "checkpoint_tso": 449349154160246789, "checkpoint_time": "2024-04-26 19:29:49.686", "error": null } ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449349154160246789, '"checkpoint_time":' '"2024-04-26' '19:29:49.686",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449349154160246789, '"checkpoint_time":' '"2024-04-26' '19:29:49.686",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13435.out cli changefeed update --pd=http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779 --config=/tmp/tidb_cdc_test/cli/changefeed.toml --no-confirm --changefeed-id custom-changefeed-name table test.gzip_finish_mark not exists for 2-th check, retry later Diff of changefeed config: {Type:update Path:[Config CaseSensitive] From:false To:true} {Type:update Path:[Config SyncPointInterval] From: To:0xc001786a98} {Type:update Path:[Config SyncPointRetention] From: To:0xc001786aa8} {Type:update Path:[Config Consistent] From: To:0xc00130e8c0} {Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true} Update changefeed config successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7362136428262768517,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:29:42.492432316+08:00","start_ts":449349150909399041,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":true,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":true,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":0,"checkpoint_ts":449349154160246789,"checkpoint_time":"2024-04-26 19:29:49.686"} PASS coverage: 2.8% of statements in github.com/pingcap/tiflow/... chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 68-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x start tidb cluster in /tmp/tidb_cdc_test/move_table Starting Upstream PD... 
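Taken together with the earlier ErrChangefeedUpdateRefused error ("can only update changefeed config when it is stopped or failed"), the update that succeeds above is the expected flow: pause the changefeed, apply the new config, then resume it. A sketch mirroring the cli invocations in the trace:

  # Update flow for a running changefeed, as exercised by the cli test.
  pd_addr=http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779
  cf=custom-changefeed-name

  # update is refused while the changefeed is running, so pause first
  cdc cli changefeed --changefeed-id "$cf" pause
  cdc cli changefeed update --pd="$pd_addr" \
    --config=/tmp/tidb_cdc_test/cli/changefeed.toml --no-confirm --changefeed-id "$cf"
  cdc cli changefeed --changefeed-id "$cf" resume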
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... table test.gzip_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7294.out cli changefeed pause -c gzip + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13465.out cli changefeed --changefeed-id custom-changefeed-name resume PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 69-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7325.out cli changefeed remove -c gzip =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only Starting Upstream PD... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Changefeed remove successfully. ID: gzip CheckpointTs: 449349156015964165 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 70-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7357.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 71-th check, retry later + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349157319081985, "checkpoint_time": "2024-04-26 19:30:01.736", "error": null }' + echo '{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349157319081985, "checkpoint_time": "2024-04-26 19:30:01.736", "error": null }' { "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349157319081985, "checkpoint_time": "2024-04-26 19:30:01.736", "error": null } ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349157319081985, '"checkpoint_time":' '"2024-04-26' '19:30:01.736",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349157319081985, '"checkpoint_time":' '"2024-04-26' '19:30:01.736",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13559.out cli changefeed --changefeed-id custom-changefeed-name remove ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449349157234933764 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349157234933764 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7393.out cli changefeed create --start-ts=449349157234933764 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy Create changefeed successfully! 
ID: snappy Info: {"upstream_id":7362136474245782833,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-04-26T19:30:03.340316699+08:00","start_ts":449349157234933764,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349157234933764,"checkpoint_ts":449349157234933764,"checkpoint_time":"2024-04-26 19:30:01.415"} PASS Changefeed remove successfully. ID: custom-changefeed-name CheckpointTs: 449349157319081985 SinkURI: kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) coverage: 2.4% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 72-th check, retry later + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Fri Apr 26 19:30:04 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc_server_tips/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/04/26 19:30:03.304 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.336 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.438 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.447 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:04.437 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:04.446 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/04/26 19:30:03.304 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.336 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.438 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:03.447 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:04.437 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/04/26 19:30:04.446 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]") table test.snappy_finish_mark not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 73-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ada4e00019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:4455, start at 2024-04-26 19:30:03.744406646 +0800 CST m=+5.259282544 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:03.753 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:03.754 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:03.754 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ada4e00019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:4455, start at 2024-04-26 19:30:03.744406646 +0800 CST m=+5.259282544 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:03.753 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:03.754 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:03.754 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ada6440003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-1hp4d-3x2xd, pid:4543, start at 2024-04-26 19:30:03.796177747 +0800 CST m=+5.260877757 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:03.802 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:30:03.793 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:03.793 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
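The repeated ERROR 2003 lines followed by the VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the harness waiting for TiDB to come up: it keeps querying the mysql.tidb table (whose rows, including the tikv_gc_* entries, are what get printed once the server answers). A hedged sketch of that wait, since the exact helper is not shown here; the retry count and sleep are illustrative:

  # Poll upstream TiDB (port 4000 in these tests) until it accepts queries.
  for i in $(seq 1 60); do
    if mysql -h 127.0.0.1 -P 4000 -u root -e \
        "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb;" >/dev/null 2>&1; then
      echo "TiDB is up"
      break
    fi
    echo "TiDB not ready yet ($i), retry later"
    sleep 1
  done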
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68add3c40018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:23487, start at 2024-04-26 19:30:06.732777929 +0800 CST m=+5.373757945 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:06.739 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:06.705 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:06.705 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) changefeed count 0 check pass, pd_addr: http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13643.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name [WARN] --tz is deprecated in changefeed settings. Create changefeed successfully! 
ID: custom-changefeed-name Info: {"upstream_id":7362136428262768517,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-7715?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:30:08.26703932+08:00","start_ts":449349158996541446,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349158996541446,"checkpoint_ts":449349158996541446,"checkpoint_time":"2024-04-26 19:30:08.135"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table test.snappy_finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 74-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/cdc_server_tips Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
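The "changefeed count N check pass, pd_addr: ..." lines earlier in this log verify how many changefeeds each PD address reports before and after remove/create. A hedged sketch of such a check; the use of changefeed list plus jq here is an assumption about the helper, which is not shown in this log:

  # Compare the number of changefeeds reported through one PD address with the expected count.
  check_changefeed_count() {
    local pd_addr=$1 expected=$2
    local count
    count=$(cdc cli changefeed list --pd="$pd_addr" 2>/dev/null | jq 'length')
    if [[ "$count" -ne "$expected" ]]; then
      echo "changefeed count $count check failed, pd_addr: $pd_addr, expected: $expected"
      exit 1
    fi
    echo "changefeed count $count check pass, pd_addr: $pd_addr"
  }
  # check_changefeed_count http://127.0.0.1:2379 0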
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:30:08 CST 2024] <<<<<< START cdc server in force_replicate_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.force_replicate_table.59025904.out server --log-file /tmp/tidb_cdc_test/force_replicate_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/force_replicate_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68add3c40018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:23487, start at 2024-04-26 19:30:06.732777929 +0800 CST m=+5.373757945 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:06.739 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:06.705 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:06.705 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68add5300008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:23569, start at 2024-04-26 19:30:06.804515092 +0800 CST m=+5.396812376 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:06.811 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
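The "START cdc server in force_replicate_table case" trace above launches the server in the background and then polls /debug/info until the response contains "etcd info", retrying up to 50 times with a 3-second sleep, exactly as the failed first attempt ("Connection refused ... sleep 3") shows. A sketch of that readiness loop (coverage flags from the trace omitted):

  # Start the cdc server and wait until /debug/info reports etcd info.
  cdc.test server --log-file /tmp/tidb_cdc_test/force_replicate_table/cdc.log \
    --log-level debug --data-dir /tmp/tidb_cdc_test/force_replicate_table/cdc_data \
    --cluster-id default &

  for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info || true)
    # ready once the response has no "failed to get info:" and does contain "etcd info"
    if ! echo "$res" | grep -q 'failed to get info:' && echo "$res" | grep -q 'etcd info'; then
      break
    fi
    if [[ $i -eq 50 ]]; then
      echo "cdc server did not become ready"
      exit 1
    fi
    sleep 3
  done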
tikv_gc_last_run_time 20240426-19:30:06.796 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:06.796 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/move_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/move_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/move_table/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.snappy_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7521.out cli changefeed pause -c snappy VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68addb54000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:13802, start at 2024-04-26 19:30:07.200435778 +0800 CST m=+5.182883637 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:07.206 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:07.189 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:07.189 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68addb54000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:13802, start at 2024-04-26 19:30:07.200435778 +0800 CST m=+5.182883637 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:07.206 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:07.189 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:07.189 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ade3d40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:13887, start at 2024-04-26 19:30:07.772978788 +0800 CST m=+5.702444736 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:07.779 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:07.783 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:07.783 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 75-th check, retry later PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.24998.out cli tso query --pd=http://127.0.0.1:2379 + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7557.out cli changefeed remove -c snappy TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1' ____________________________________ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ check data failed 1-th time, retry later check data successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:30:09 CST 2024] <<<<<< run test case ddl_puller_lag success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 76-th check, retry later Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:11 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f9134c20-1c5d-4b9d-918f-4d64ef7231d9 {"id":"f9134c20-1c5d-4b9d-918f-4d64ef7231d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131009} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b3cdbd1 f9134c20-1c5d-4b9d-918f-4d64ef7231d9 /tidb/cdc/default/default/upstream/7362136559128972854 {"id":7362136559128972854,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f9134c20-1c5d-4b9d-918f-4d64ef7231d9 {"id":"f9134c20-1c5d-4b9d-918f-4d64ef7231d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131009} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b3cdbd1 f9134c20-1c5d-4b9d-918f-4d64ef7231d9 /tidb/cdc/default/default/upstream/7362136559128972854 {"id":7362136559128972854,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f9134c20-1c5d-4b9d-918f-4d64ef7231d9 {"id":"f9134c20-1c5d-4b9d-918f-4d64ef7231d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131009} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f1a2b3cdbd1 f9134c20-1c5d-4b9d-918f-4d64ef7231d9 /tidb/cdc/default/default/upstream/7362136559128972854 {"id":7362136559128972854,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: 4e855166-626d-4a58-bfec-470dd012c6d7 Info: {"upstream_id":7362136559128972854,"namespace":"default","id":"4e855166-626d-4a58-bfec-470dd012c6d7","sink_uri":"kafka://127.0.0.1:9092/ticdc-force_replicate_table-test-13157?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:30:12.193141239+08:00","start_ts":449349159185022979,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":true,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349159185022979,"checkpoint_ts":449349159185022979,"checkpoint_time":"2024-04-26 19:30:08.854"} [Fri Apr 26 19:30:12 CST 2024] <<<<<< START kafka consumer in force_replicate_table case >>>>>> consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/conf/changefeed.toml Changefeed remove successfully. ID: snappy CheckpointTs: 449349158257295387 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
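The force_replicate_table and kafka_compression cases above drive everything through the cdc CLI: a changefeed is created against a Kafka sink URI and later removed by its changefeed ID. Stripped of the coverage wrapper (cdc.test -test.coverprofile=...), the two invocations reduce to roughly the following; the topic name and changefeed ID here are placeholders:

# Create a changefeed that writes to a Kafka topic with the open-protocol encoder.
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --sink-uri="kafka://127.0.0.1:9092/ticdc-example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760" \
    -c example-changefeed

# Remove it again by changefeed ID once the case is done.
cdc cli changefeed remove --pd=http://127.0.0.1:2379 -c example-changefeed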
+ cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15232.out cli tso query --pd=http://127.0.0.1:2379 + endpoints=http://127.0.0.1:2379 + changefeed_id=custom-changefeed-name + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s + info='{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349159940259843, "checkpoint_time": "2024-04-26 19:30:11.735", "error": null }' + echo '{ "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349159940259843, "checkpoint_time": "2024-04-26 19:30:11.735", "error": null }' { "upstream_id": 7362136428262768517, "namespace": "default", "id": "custom-changefeed-name", "state": "normal", "checkpoint_tso": 449349159940259843, "checkpoint_time": "2024-04-26 19:30:11.735", "error": null } ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349159940259843, '"checkpoint_time":' '"2024-04-26' '19:30:11.735",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136428262768517, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449349159940259843, '"checkpoint_time":' '"2024-04-26' '19:30:11.735",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae23b8001a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:9033, start at 2024-04-26 19:30:11.870573791 +0800 CST m=+5.208846895 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:11.877 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:11.872 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:11.872 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae23b8001a Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:9033, start at 2024-04-26 19:30:11.870573791 +0800 CST m=+5.208846895 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:11.877 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:11.872 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:11.872 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae25200009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-ww8h2-wvzdx, pid:9122, start at 2024-04-26 19:30:11.921135688 +0800 CST m=+5.208292209 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:11.929 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:11.912 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:11.912 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
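The custom-changefeed-name block above is the harness asserting on changefeed health: it queries the changefeed with the CLI, then uses jq to pull out .state and .error.message and compares them with the expected values (normal and null in this run). A condensed sketch of that check, with an illustrative function name:

# Query a changefeed and fail if its state or error message differs from what we expect.
check_changefeed_state() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ "$state" != "$expected_state" ]]; then
        echo "unexpected changefeed state: $state (want $expected_state)"
        return 1
    fi
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "unexpected error message: $message (want $expected_error)"
        return 1
    fi
}

check_changefeed_state http://127.0.0.1:2379 custom-changefeed-name normal null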
Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449349160011300865 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349160011300865 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "recordcount"="10000" "mysql.db"="move_table" "mysql.host"="127.0.0.1" "readallfields"="true" "mysql.port"="4000" "mysql.user"="root" "scanproportion"="0" "threadcount"="10" "readproportion"="0" "insertproportion"="0" "requestdistribution"="uniform" "workload"="core" "updateproportion"="0" "dotransactions"="false" "operationcount"="0" ********************************************** + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13763.out cli changefeed create --start-ts=449349150909399041 '--sink-uri=kafka://127.0.0.1:9093/ticdc-cli-test-ssl-14611?protocol=open-protocol&ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem&cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem&key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true' --tz=Asia/Shanghai [WARN] --tz is deprecated in changefeed settings. VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae323c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:7629, start at 2024-04-26 19:30:12.800927079 +0800 CST m=+5.219540635 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:12.807 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:12.800 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:12.800 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7594.out cli tso query --pd=http://127.0.0.1:2379 Create changefeed successfully! ID: 7043b372-340f-4321-a8d7-b4caa4769437 Info: {"upstream_id":7362136428262768517,"namespace":"default","id":"7043b372-340f-4321-a8d7-b4caa4769437","sink_uri":"kafka://127.0.0.1:9093/ticdc-cli-test-ssl-14611?protocol=open-protocol\u0026ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem\u0026cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem\u0026key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760\u0026insecure-skip-verify=true","create_time":"2024-04-26T19:30:14.181686782+08:00","start_ts":449349150909399041,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349150909399041,"checkpoint_ts":449349150909399041,"checkpoint_time":"2024-04-26 19:29:37.285"} PASS + set +x + tso='449349160176975873 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' 
+ echo 449349160176975873 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449349160176975873 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test [Fri Apr 26 19:30:14 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + GO_FAILPOINTS= + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1526715269.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 coverage: 2.4% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark exists Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10558.out cli tso query --pd=http://127.0.0.1:2379 check diff successfully Run finished, takes 1.306957296s INSERT - Takes(s): 1.3, Count: 10000, OPS: 7682.5, Avg(us): 1268, Min(us): 841, Max(us): 5329, 95th(us): 2000, 99th(us): 2000 [Fri Apr 26 19:30:15 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2509225094.out server --log-file /tmp/tidb_cdc_test/move_table/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data1 --cluster-id default --addr 127.0.0.1:8300 + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 1-th time... 
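The "<<<<<< START cdc server in ... case >>>>>>" blocks follow the same shape every time: cdc.test is launched in the background with a per-case log file and data directory, then the script polls http://127.0.0.1:8300/debug/info with curl up to 50 times, sleeping 3 seconds between attempts, until the response contains "etcd info" (the "Connection refused" lines above are the early attempts before the server is listening). A reduced sketch of that loop; the 50-attempt / 3-second budget comes from the trace, WORK_DIR stands in for the per-case /tmp/tidb_cdc_test/<case> directory, and everything else is simplified:

# Start a cdc server for one case and wait until its status endpoint reports etcd info.
cdc.test server \
    --log-file "$WORK_DIR/cdc.log" --log-level debug \
    --data-dir "$WORK_DIR/cdc_data" --cluster-id default &

for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info || true)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error, keep waiting"
    elif echo "$res" | grep -q 'etcd info'; then
        break
    fi
    if ((i == 50)); then
        echo "cdc server failed to come up in time"
        exit 1
    fi
    sleep 3
done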
+ set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13799.out cli unsafe delete-service-gc-safepoint wait process cdc.test exit for 2-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae323c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:7629, start at 2024-04-26 19:30:12.800927079 +0800 CST m=+5.219540635 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:12.807 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:12.800 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:12.800 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae33c00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:7713, start at 2024-04-26 19:30:12.891998321 +0800 CST m=+5.245653802 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:12.899 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:12.898 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:12.898 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
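The "wait process cdc.test exit for N-th time..." lines are the teardown half of each case: after the data checks pass, the script repeatedly looks for a surviving cdc.test process and only prints "process cdc.test already exit" once none is found. A small sketch of that wait loop under those assumptions (the real helper's process lookup and any kill/timeout handling are not shown in this log):

# Wait until no process with the given name is left before declaring the case done.
wait_process_exit() {
    local name=$1 count=1
    while pgrep -x "$name" >/dev/null 2>&1; do
        echo "wait process $name exit for $count-th time..."
        sleep 1
        count=$((count + 1))
    done
    echo "process $name already exit"
}

wait_process_exit cdc.test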
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449349160616853509 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349160616853509 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7626.out cli changefeed create --start-ts=449349160616853509 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4 Confirm that you know what this command will do and use it at your own risk [Y/N] CDC service GC safepoint truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: lz4 Info: {"upstream_id":7362136474245782833,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-04-26T19:30:16.24506133+08:00","start_ts":449349160616853509,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349160616853509,"checkpoint_ts":449349160616853509,"checkpoint_time":"2024-04-26 19:30:14.316"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 3-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449349160867463169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349160867463169 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:30:16 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1059910601.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 4-th time... 
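Each case that creates a changefeed first asks PD for a current TSO to use as --start-ts; the trace above captures the CLI output, which also carries the coverage summary line printed by the instrumented binary, and strips it down to the first field with awk. A compact sketch of that step (the helper name is illustrative):

# Fetch a start timestamp from PD via the cdc CLI; keep only the TSO itself,
# dropping the trailing "PASS coverage: ..." line from the instrumented binary.
get_start_ts() {
    local pd=$1
    cdc cli tso query --pd="$pd" | awk 'NR == 1 {print $1}'
}

start_ts=$(get_start_ts http://127.0.0.1:2379)   # e.g. 449349160616853509

The resulting value is what appears as --start-ts=... in the changefeed create commands traced in this log.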
+ set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13841.out cli unsafe reset --no-confirm --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:17 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e7ee498a-c591-4043-908f-97cca9aebb8f {"id":"e7ee498a-c591-4043-908f-97cca9aebb8f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131014} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4ca0d5 e7ee498a-c591-4043-908f-97cca9aebb8f /tidb/cdc/default/default/upstream/7362136573558391674 {"id":7362136573558391674,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e7ee498a-c591-4043-908f-97cca9aebb8f {"id":"e7ee498a-c591-4043-908f-97cca9aebb8f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131014} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4ca0d5 e7ee498a-c591-4043-908f-97cca9aebb8f /tidb/cdc/default/default/upstream/7362136573558391674 {"id":7362136573558391674,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e7ee498a-c591-4043-908f-97cca9aebb8f {"id":"e7ee498a-c591-4043-908f-97cca9aebb8f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131014} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4ca0d5 e7ee498a-c591-4043-908f-97cca9aebb8f /tidb/cdc/default/default/upstream/7362136573558391674 {"id":7362136573558391674,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed-redo.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449349160176975873 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml + cdc.test 
-test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15327.out cli changefeed create --start-ts=449349160176975873 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml wait process cdc.test exit for 5-th time... Create changefeed successfully! ID: test-1 Info: {"upstream_id":7362136573558391674,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-04-26T19:30:17.644271745+08:00","start_ts":449349160176975873,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349160176975873,"checkpoint_ts":449349160176975873,"checkpoint_time":"2024-04-26 19:30:12.638"} PASS wait process cdc.test exit for 6-th time... coverage: 2.5% of statements in github.com/pingcap/tiflow/... reset and all metadata truncated in PD! PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table sink_retry.finish_mark_2 exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:30:15 CST 2024] <<<<<< run test case sink_retry success! >>>>>> + set +x [Fri Apr 26 19:30:17 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> wait process cdc.test exit for 7-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:18 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25146.out cli changefeed create --start-ts=449349160011300865 '--sink-uri=kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [Fri Apr 26 19:30:17 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.91149116.out server --log-file 
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! ID: a7646103-d69c-4664-9c3c-c1c08533190b Info: {"upstream_id":7362136581516025943,"namespace":"default","id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink_uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:30:18.560938881+08:00","start_ts":449349160011300865,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349160011300865,"checkpoint_ts":449349160011300865,"checkpoint_time":"2024-04-26 19:30:12.006"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
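The cli case traced above also exercises the "unsafe" maintenance subcommands: delete-service-gc-safepoint drops the service GC safepoint TiCDC registered in PD ("CDC service GC safepoint truncated in PD!"), and reset wipes TiCDC metadata ("reset and all metadata truncated in PD!"). As in the trace, the first prompts for confirmation and the second is run with --no-confirm:

# Drop the service GC safepoint that TiCDC registered in PD (answers a Y/N prompt).
cdc cli unsafe delete-service-gc-safepoint --pd=http://127.0.0.1:2379

# Remove all TiCDC metadata without prompting.
cdc cli unsafe reset --no-confirm --pd=http://127.0.0.1:2379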
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/04/26 19:30:16.206 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.241 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.340 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.349 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:17.339 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:17.348 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/04/26 19:30:16.206 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.241 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.340 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:16.349 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:17.339 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/04/26 19:30:17.348 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]") table test.lz4_finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 8-th time... table force_replicate_table.t0 exists table force_replicate_table.t1 exists table force_replicate_table.t2 exists table force_replicate_table.t3 not exists for 1-th check, retry later + set +x + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + set +x wait process cdc.test exit for 9-th time... + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later + set +x [Fri Apr 26 19:30:20 CST 2024] <<<<<< START kafka consumer in move_table case >>>>>> [Fri Apr 26 19:30:20 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2518125183.out server --log-file /tmp/tidb_cdc_test/move_table/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data2 --cluster-id default --addr 127.0.0.1:8301 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 10-th time... process cdc.test already exit [Fri Apr 26 19:30:19 CST 2024] <<<<<< run test case ddl_manager success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e782f656-8cc9-4567-992b-88bdd161dfaa {"id":"e782f656-8cc9-4567-992b-88bdd161dfaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131017} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b60decf e782f656-8cc9-4567-992b-88bdd161dfaa /tidb/cdc/default/default/upstream/7362136601743892102 {"id":7362136601743892102,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e782f656-8cc9-4567-992b-88bdd161dfaa {"id":"e782f656-8cc9-4567-992b-88bdd161dfaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131017} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b60decf e782f656-8cc9-4567-992b-88bdd161dfaa /tidb/cdc/default/default/upstream/7362136601743892102 {"id":7362136601743892102,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e782f656-8cc9-4567-992b-88bdd161dfaa {"id":"e782f656-8cc9-4567-992b-88bdd161dfaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131017} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b60decf e782f656-8cc9-4567-992b-88bdd161dfaa /tidb/cdc/default/default/upstream/7362136601743892102 {"id":7362136601743892102,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10659.out cli changefeed create --start-ts=449349160867463169 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml Create changefeed successfully! 
ID: 8048a0ba-2018-4e24-a598-337278db5e10 Info: {"upstream_id":7362136601743892102,"namespace":"default","id":"8048a0ba-2018-4e24-a598-337278db5e10","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-04-26T19:30:20.343976711+08:00","start_ts":449349160867463169,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349160867463169,"checkpoint_ts":449349160867463169,"checkpoint_time":"2024-04-26 19:30:15.272"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae9eec0008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:23922, start at 2024-04-26 19:30:19.714232382 +0800 CST m=+5.177286576 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:19.720 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:19.707 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:19.707 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68ae9eec0008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:23922, start at 2024-04-26 19:30:19.714232382 +0800 CST m=+5.177286576 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:19.720 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:19.707 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:19.707 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68aea1380015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:23993, start at 2024-04-26 19:30:19.876839048 +0800 CST m=+5.293847076 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:19.883 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:19.854 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:19.854 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
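The kafka_compression failure logged earlier above ("run.sh: line 22: [[: ... syntax error: operand expected") is a shell problem rather than a replication problem: the error message shows raw matching log lines being substituted into an arithmetic [[ comparison, so the operand is a block of log text instead of a number. The actual run.sh is not shown in this log, so the following is only a sketch of the more robust shape for that kind of check, counting matches with grep -c and asserting on the count (the log path is an assumption):

# Count how many times the producer logged the expected compression algorithm,
# then assert on the number instead of on the raw log text.
algorithm=lz4
count=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" \
    "/tmp/tidb_cdc_test/kafka_compression/cdc.log" || true)
if [[ "$count" -lt 1 ]]; then
    echo "${algorithm} compression is not working"
    exit 1
fi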
Logging trace to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:21 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9307348c-00da-4bf2-9998-f93dda85b6e8 {"id":"9307348c-00da-4bf2-9998-f93dda85b6e8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131018} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b592bcf 9307348c-00da-4bf2-9998-f93dda85b6e8 /tidb/cdc/default/default/upstream/7362136598435372836 {"id":7362136598435372836,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9307348c-00da-4bf2-9998-f93dda85b6e8 {"id":"9307348c-00da-4bf2-9998-f93dda85b6e8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131018} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b592bcf 9307348c-00da-4bf2-9998-f93dda85b6e8 /tidb/cdc/default/default/upstream/7362136598435372836 {"id":7362136598435372836,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/9307348c-00da-4bf2-9998-f93dda85b6e8 {"id":"9307348c-00da-4bf2-9998-f93dda85b6e8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131018} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b592bcf 9307348c-00da-4bf2-9998-f93dda85b6e8 /tidb/cdc/default/default/upstream/7362136598435372836 {"id":7362136598435372836,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.9174.out cli tso query --pd=http://127.0.0.1:2379 table force_replicate_table.t3 exists table force_replicate_table.t4 not exists for 1-th check, retry later table test.lz4_finish_mark not exists for 2-th check, retry later table test.t1 exists + sleep 5 + set +x table test.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 2-th check, retry later table test.finish_mark not exists for 3-th check, retry later table test.finish_mark not exists for 4-th check, retry later table test.finish_mark not exists for 5-th check, retry later + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13951.out cli unsafe resolve-lock --region=86 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x + tso='449349162461036546 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349162461036546 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.9210.out cli changefeed create --start-ts=449349162461036546 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple' -c simple-handle-key-only --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml table test.lz4_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7754.out cli changefeed pause -c lz4 table test.finish_mark not exists for 6-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.cli.25351.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:23 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012e8630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} {CheckpointTs:449349160469791067 MinTableBarrierTs:449349162737336326 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b 
{"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349160469791067,"min-table-barrier-ts":449349162737336326,"admin-job-type":0} /tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012e8630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} 
{CheckpointTs:449349160469791067 MinTableBarrierTs:449349162737336326 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b {"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349160469791067,"min-table-barrier-ts":449349162737336326,"admin-job-type":0} 
/tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b PASS {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012e8630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} {CheckpointTs:449349160469791067 MinTableBarrierTs:449349162737336326 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b 
{"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349160469791067,+ grep -q 'etcd info' "min-table-barrier-ts":449349162737336326,"admin-job-type":0} /tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Fri Apr 26 19:30:23 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2530125308.out server --log-file /tmp/tidb_cdc_test/move_table/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data3 --cluster-id 
default --addr 127.0.0.1:8302 ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! ID: simple-handle-key-only Info: {"upstream_id":7362136598435372836,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple","create_time":"2024-04-26T19:30:23.266686083+08:00","start_ts":449349162461036546,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349162461036546,"checkpoint_ts":449349162461036546,"checkpoint_time":"2024-04-26 19:30:21.351"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... table force_replicate_table.t4 exists table force_replicate_table.t5 not exists for 1-th check, retry later coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.cli.13983.out cli unsafe resolve-lock --region=86 --ts=449349161932554242 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_only_block_related_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x + tso='449349162947575809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349162947575809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
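The repeated curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info, grep and sleep 3 traces above are the harness polling a freshly started cdc server until its capture registration shows up in etcd. Reconstructed as a standalone sketch, with the loop bound, messages and curl command taken from the trace and the failure handling assumed:

curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info'
get_info_fail_msg='failed to get info:'
etcd_info_msg='etcd info'
for (( i = 0; i <= 50; i++ )); do
    res=$($curl_status_cmd 2>/dev/null)        # empty while the port still refuses connections
    if echo "$res" | grep -q "$get_info_fail_msg"; then
        sleep 3 && continue                    # server is up but cannot reach etcd yet
    fi
    if echo "$res" | grep -q "$etcd_info_msg"; then
        break                                  # capture info visible in etcd: server is ready
    fi
    [ "$i" -eq 50 ] && echo "cdc server failed to start in time" && exit 1
    sleep 3
done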
+ set +x + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7785.out cli changefeed remove -c lz4 table test.finish_mark not exists for 7-th check, retry later try a VALID cdc server command [Fri Apr 26 19:30:25 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2538725389.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table force_replicate_table.t5 exists table force_replicate_table.t6 not exists for 1-th check, retry later Changefeed remove successfully. ID: lz4 CheckpointTs: 449349161639215134 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 218 100 212 100 6 152k 4408 --:--:-- --:--:-- --:--:-- 207k { "error_msg": "[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }", "error_code": "CDC:ErrAPIInvalidParam" + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:26 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0010c2990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} {CheckpointTs:449349163261624327 MinTableBarrierTs:449349163524030470 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1ee432cb-7cc5-48f4-8716-fe459d233902 {"id":"1ee432cb-7cc5-48f4-8716-fe459d233902","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131023} /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d7b 1ee432cb-7cc5-48f4-8716-fe459d233902 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b 
{"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349163261624327,"min-table-barrier-ts":449349163524030470,"admin-job-type":0} /tidb/cdc/default/default/task/position/1ee432cb-7cc5-48f4-8716-fe459d233902/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST 
StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0010c2990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} {CheckpointTs:449349163261624327 MinTableBarrierTs:449349163524030470 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1ee432cb-7cc5-48f4-8716-fe459d233902 {"id":"1ee432cb-7cc5-48f4-8716-fe459d233902","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131023} /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d7b 1ee432cb-7cc5-48f4-8716-fe459d233902 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b 
{"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349163261624327,"min-table-barrier-ts":449349163524030470,"admin-job-type":0} /tidb/cdc/default/default/task/position/1ee432cb-7cc5-48f4-8716-fe459d233902/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** processors info ***: changefeedID: default/a7646103-d69c-4664-9c3c-c1c08533190b {UpstreamID:7362136581516025943 Namespace:default ID:a7646103-d69c-4664-9c3c-c1c08533190b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:18.560938881 +0800 CST 
StartTs:449349160011300865 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0010c2990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349161728344065} {CheckpointTs:449349163261624327 MinTableBarrierTs:449349163524030470 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/1ee432cb-7cc5-48f4-8716-fe459d233902 {"id":"1ee432cb-7cc5-48f4-8716-fe459d233902","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131023} /tidb/cdc/default/__cdc_meta__/capture/68a3a36f-7390-425f-8bfa-4ac35c8631ed {"id":"68a3a36f-7390-425f-8bfa-4ac35c8631ed","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131015} /tidb/cdc/default/__cdc_meta__/capture/eaebb854-d460-4048-8042-389d5f9de6b4 {"id":"eaebb854-d460-4048-8042-389d5f9de6b4","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131020} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0cf3 68a3a36f-7390-425f-8bfa-4ac35c8631ed /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d47 eaebb854-d460-4048-8042-389d5f9de6b4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b4e0d7b 1ee432cb-7cc5-48f4-8716-fe459d233902 /tidb/cdc/default/default/changefeed/info/a7646103-d69c-4664-9c3c-c1c08533190b 
{"upstream-id":7362136581516025943,"namespace":"default","changefeed-id":"a7646103-d69c-4664-9c3c-c1c08533190b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-30690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:18.560938881+08:00","start-ts":449349160011300865,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349161728344065} /tidb/cdc/default/default/changefeed/status/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":449349163261624327,"min-table-barrier-ts":449349163524030470,"admin-job-type":0} /tidb/cdc/default/default/task/position/1ee432cb-7cc5-48f4-8716-fe459d233902/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/68a3a36f-7390-425f-8bfa-4ac35c8631ed/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/eaebb854-d460-4048-8042-389d5f9de6b4/a7646103-d69c-4664-9c3c-c1c08533190b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136581516025943 {"id":7362136581516025943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x table move_table.usertable exists go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading 
github.com/KimMachineGun/automemlimit v0.2.4 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/xiang90/probing 
v0.0.0-20221125231312-a49e3df8f510 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 start tidb cluster in /tmp/tidb_cdc_test/ddl_only_block_related_table Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x table test.finish_mark not exists for 8-th check, retry later + kill_tikv ++ ps aux ++ grep tikv-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo + info='jenkins 13042 28.2 0.5 4743024 2273084 ? Sl 19:29 0:07 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20160 --status-addr 127.0.0.1:20181 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv1.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv1 jenkins 13043 22.0 0.5 4705648 2232940 ? Sl 19:29 0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20161 --status-addr 127.0.0.1:20182 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv2.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv2 jenkins 13044 21.3 0.5 4689780 2200112 ? Sl 19:29 0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20162 --status-addr 127.0.0.1:20183 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv3.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv3 jenkins 13050 27.4 0.5 4718960 2263792 ? 
Sl 19:29 0:07 tikv-server --pd 127.0.0.1:2479 -A 127.0.0.1:21160 --status-addr 127.0.0.1:21180 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down' ++ ps aux ++ grep tikv-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo ++ awk '{print $2}' ++ xargs kill -9 ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 2574 0 --:--:-- --:--:-- --:--:-- 2585 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-04-26 19:30:25.038","puller_resolved_ts":"2024-04-26 19:30:19.089","last_synced_ts":"2024-04-26 19:30:19.588","now_ts":"2024-04-26 19:30:26.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:30:25.038","puller_resolved_ts":"2024-04-26' '19:30:19.089","last_synced_ts":"2024-04-26' '19:30:19.588","now_ts":"2024-04-26' '19:30:26.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:30:25.038","puller_resolved_ts":"2024-04-26' '19:30:19.089","last_synced_ts":"2024-04-26' '19:30:19.588","now_ts":"2024-04-26' '19:30:26.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + target_message='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7818.out cli tso query --pd=http://127.0.0.1:2379 table force_replicate_table.t6 exists check_data_subset force_replicate_table.t0 127.0.0.1 4000 127.0.0.1 3306 run task successfully check_data_subset force_replicate_table.t1 127.0.0.1 4000 127.0.0.1 3306 go: downloading github.com/ardielle/ardielle-go v1.5.2 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_error/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... run task successfully check_data_subset force_replicate_table.t2 127.0.0.1 4000 127.0.0.1 3306 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:28 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c2e4f694-b444-424b-8767-3b801cdc19a4 {"id":"c2e4f694-b444-424b-8767-3b801cdc19a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131025} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b7b91e2 c2e4f694-b444-424b-8767-3b801cdc19a4 /tidb/cdc/default/default/upstream/7362136628528593893 {"id":7362136628528593893,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c2e4f694-b444-424b-8767-3b801cdc19a4 {"id":"c2e4f694-b444-424b-8767-3b801cdc19a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131025} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b7b91e2 c2e4f694-b444-424b-8767-3b801cdc19a4 /tidb/cdc/default/default/upstream/7362136628528593893 {"id":7362136628528593893,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c2e4f694-b444-424b-8767-3b801cdc19a4 {"id":"c2e4f694-b444-424b-8767-3b801cdc19a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131025} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2b7b91e2 c2e4f694-b444-424b-8767-3b801cdc19a4 /tidb/cdc/default/default/upstream/7362136628528593893 {"id":7362136628528593893,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + set +x + tso='449349164011618307 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349164011618307 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7859.out cli changefeed create --start-ts=449349164011618307 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd Create changefeed successfully! 
ID: zstd Info: {"upstream_id":7362136474245782833,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-04-26T19:30:29.149933365+08:00","start_ts":449349164011618307,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349164011618307,"checkpoint_ts":449349164011618307,"checkpoint_time":"2024-04-26 19:30:27.266"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... run task successfully check_data_subset force_replicate_table.t3 127.0.0.1 4000 127.0.0.1 3306 } % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 182 100 182 0 0 188k 0 --:--:-- --:--:-- --:--:-- 177k { "version": "v8.2.0-alpha-20-g16f5d59f9", "git_hash": "16f5d59f936001f6d7031387873b3c668f3c5ae6", "id": "b8c559fc-00ea-423d-ac6c-b321f784be1b", "pid": 12860, "is_owner": true table test.finish_mark not exists for 9-th check, retry later }wait process cdc.test exit for 1-th time... run task successfully check_data_subset force_replicate_table.t4 127.0.0.1 4000 127.0.0.1 3306 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:30:30 CST 2024] <<<<<< run test case cli success! >>>>>> run task successfully check_data_subset force_replicate_table.t5 127.0.0.1 4000 127.0.0.1 3306 start tidb cluster in /tmp/tidb_cdc_test/changefeed_error Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x [Fri Apr 26 19:30:30 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> table test.finish_mark not exists for 10-th check, retry later + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.9262.out cli changefeed pause -c simple-handle-key-only PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... run task successfully check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 id=19,a=NULL doesn't exist in downstream table force_replicate_table.t6 run task failed 1-th time, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/04/26 19:30:29.106 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.146 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.239 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.250 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:30.239 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:30.248 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/04/26 19:30:29.106 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.146 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.239 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:29.250 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:30.239 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/04/26 19:30:30.248 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]") table test.zstd_finish_mark not exists for 1-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.9297.out cli changefeed update -c simple-handle-key-only 
'--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple&max-message-bytes=700' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml --no-confirm Diff of changefeed config: {Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple To:kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple&max-message-bytes=700} {Type:update Path:[Config SyncPointInterval] From: To:0xc001bfd2f8} {Type:update Path:[Config SyncPointRetention] From: To:0xc001bfd308} {Type:update Path:[Config Consistent] From: To:0xc0012d5a40} Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Update changefeed config successfully! ID: simple-handle-key-only Info: {"upstream_id":7362136598435372836,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-1776?protocol=simple\u0026max-message-bytes=700","create_time":"2024-04-26T19:30:23.266686083+08:00","start_ts":449349162461036546,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":0,"checkpoint_ts":449349164636831746,"checkpoint_time":"2024-04-26 19:30:29.651"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark not exists for 11-th check, retry later check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 id=7,a=NULL doesn't exist in downstream table force_replicate_table.t6 run task failed 2-th time, retry later table test.zstd_finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 12-th check, retry later table test.zstd_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7988.out cli changefeed pause -c zstd + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.9334.out cli changefeed resume -c simple-handle-key-only PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 table test.finish_mark not exists for 13-th check, retry later + set +x table test.finish_mark not exists for 1-th check, retry later run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afaf480005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:7064, start at 2024-04-26 19:30:37.143137555 +0800 CST m=+5.057073481 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:37.149 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:37.138 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:37.138 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afaf480005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:7064, start at 2024-04-26 19:30:37.143137555 +0800 CST m=+5.057073481 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:37.149 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:37.138 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:37.138 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afb1940003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:7146, start at 2024-04-26 19:30:37.28597229 +0800 CST m=+5.150983925 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:37.294 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:37.285 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:37.285 +0800 All versions after safe point can be accessed. (DO NOT EDIT) + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8021.out cli changefeed remove -c zstd =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_source/run.sh using Sink-Type: kafka... <<================= Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Changefeed remove successfully. ID: zstd CheckpointTs: 449349165020872733 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 14-th check, retry later table test.finish_mark not exists for 2-th check, retry later wait process cdc.test exit for 3-th time... 
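The "[[: ... syntax error: operand expected" reported earlier from kafka_compression/run.sh line 22, where the offending token is a block of sarama "Kafka producer uses zstd compression algorithm" log lines, is the usual bash failure when raw multi-line grep output is substituted where [[ expects a single numeric operand. A minimal sketch of a guarded version of that check, not the actual script; the message text is copied from the sarama INFO lines above, while the log path is an assumption:

    algo=zstd
    cdc_log=/tmp/tidb_cdc_test/kafka_compression/cdc.log   # assumed location
    # grep -c yields one number even when the pattern matches many lines,
    # so the arithmetic test below always sees a single operand
    count=$(grep -c "Kafka producer uses ${algo} compression algorithm" "$cdc_log" || true)
    if [[ "${count:-0}" -eq 0 ]]; then
        echo "no ${algo} compression message found in the producer log"
        exit 1
    fi
    echo "producer picked up ${algo} compression (${count} matches)"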
+ set +x [2024/04/26 19:30:39.002 +08:00] [INFO] [main.go:61] ["table mover started"] [2024/04/26 19:30:39.004 +08:00] [INFO] [main.go:166] ["new cluster initialized"] [2024/04/26 19:30:39.005 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=68a3a36f-7390-425f-8bfa-4ac35c8631ed] [2024/04/26 19:30:39.005 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300] [2024/04/26 19:30:39.005 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"] [2024/04/26 19:30:39.188 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=1ee432cb-7cc5-48f4-8716-fe459d233902] [processorDetail="{\"table_ids\":[]}"] [2024/04/26 19:30:39.388 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=68a3a36f-7390-425f-8bfa-4ac35c8631ed] [processorDetail="{\"table_ids\":[106,108]}"] [2024/04/26 19:30:39.587 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=eaebb854-d460-4048-8042-389d5f9de6b4] [processorDetail="{\"table_ids\":[]}"] [2024/04/26 19:30:39.587 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"1ee432cb-7cc5-48f4-8716-fe459d233902\":[],\"68a3a36f-7390-425f-8bfa-4ac35c8631ed\":[{\"ID\":106,\"Changefeed\":\"a7646103-d69c-4664-9c3c-c1c08533190b\"},{\"ID\":108,\"Changefeed\":\"a7646103-d69c-4664-9c3c-c1c08533190b\"}],\"eaebb854-d460-4048-8042-389d5f9de6b4\":[]}"] [2024/04/26 19:30:39.587 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=a7646103-d69c-4664-9c3c-c1c08533190b&target-cp-id=eaebb854-d460-4048-8042-389d5f9de6b4&table-id=106"] [2024/04/26 19:30:39.638 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=106] [2024/04/26 19:30:39.638 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=a7646103-d69c-4664-9c3c-c1c08533190b&target-cp-id=eaebb854-d460-4048-8042-389d5f9de6b4&table-id=108"] [2024/04/26 19:30:39.687 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=108] [2024/04/26 19:30:39.688 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=68a3a36f-7390-425f-8bfa-4ac35c8631ed] [targetCapture=eaebb854-d460-4048-8042-389d5f9de6b4] table move_table.check1 exists check diff successfully cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:30:40 CST 2024] <<<<<< run test case force_replicate_table success! >>>>>> wait process cdc.test exit for 1-th time... [Fri Apr 26 19:30:40 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.85858587.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 2-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afd9c0001f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:4173, start at 2024-04-26 19:30:39.894334706 +0800 CST m=+5.151564622 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:39.901 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:39.905 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:39.905 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afd9c0001f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:4173, start at 2024-04-26 19:30:39.894334706 +0800 CST m=+5.151564622 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:39.901 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:39.905 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:39.905 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68afda780014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:4252, start at 2024-04-26 19:30:39.92388187 +0800 CST m=+5.131405501 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:39.931 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:39.902 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:39.902 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark not exists for 15-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:30:41 CST 2024] <<<<<< run test case kafka_compression success! >>>>>> table test.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/savepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process cdc.test exit for 2-th time... 
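The repeated "START cdc server ... curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info ... Connection refused ... sleep 3" traces are the readiness probe the integration tests run after launching cdc.test in server mode: poll /debug/info for up to 50 rounds, treat "failed to get info:" as not ready, and break once the response contains "etcd info". A rough reconstruction of that loop, read off the shell trace; the iteration bound, sleep interval, and grep patterns come from the trace, everything else is an assumption:

    i=0
    while (( i <= 50 )); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info || true)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server answered but reported an error, keep waiting"
        elif echo "$res" | grep -q 'etcd info'; then
            break                      # owner/etcd metadata is being served, server is up
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server failed to become ready in time"
            exit 1
        fi
        sleep 3
        i=$((i + 1))
    done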
+ pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5691.out cli tso query --pd=http://127.0.0.1:2379 table test.finish_mark not exists for 16-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > [2024/04/26 19:30:43.482 +08:00] [INFO] [main.go:61] ["table mover started"] [2024/04/26 19:30:43.485 +08:00] [INFO] [main.go:166] ["new cluster initialized"] [2024/04/26 19:30:43.485 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=68a3a36f-7390-425f-8bfa-4ac35c8631ed] [2024/04/26 19:30:43.485 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300] [2024/04/26 19:30:43.486 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"] cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:30:43 CST 2024] <<<<<< run test case kafka_simple_handle_key_only success! >>>>>> < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:43 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d66f925e-1932-40c3-b1af-bd85fccb14c2 {"id":"d66f925e-1932-40c3-b1af-bd85fccb14c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131040} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc234c4 d66f925e-1932-40c3-b1af-bd85fccb14c2 /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d66f925e-1932-40c3-b1af-bd85fccb14c2 {"id":"d66f925e-1932-40c3-b1af-bd85fccb14c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131040} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc234c4 d66f925e-1932-40c3-b1af-bd85fccb14c2 /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d66f925e-1932-40c3-b1af-bd85fccb14c2 
{"id":"d66f925e-1932-40c3-b1af-bd85fccb14c2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131040} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc234c4 d66f925e-1932-40c3-b1af-bd85fccb14c2 /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.cli.8645.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-only-block-related-table [2024/04/26 19:30:43.638 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=1ee432cb-7cc5-48f4-8716-fe459d233902] [processorDetail="{\"table_ids\":[110]}"] [2024/04/26 19:30:43.838 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=68a3a36f-7390-425f-8bfa-4ac35c8631ed] [processorDetail="{\"table_ids\":[]}"] Create changefeed successfully! ID: ddl-only-block-related-table Info: {"upstream_id":7362136707987132970,"namespace":"default","id":"ddl-only-block-related-table","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:30:43.972484915+08:00","start_ts":449349168355868674,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349168355868674,"checkpoint_ts":449349168355868674,"checkpoint_time":"2024-04-26 
19:30:43.838"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [2024/04/26 19:30:44.038 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=a7646103-d69c-4664-9c3c-c1c08533190b] [captureID=eaebb854-d460-4048-8042-389d5f9de6b4] [processorDetail="{\"table_ids\":[108]}"] [2024/04/26 19:30:44.038 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"1ee432cb-7cc5-48f4-8716-fe459d233902\":[{\"ID\":110,\"Changefeed\":\"a7646103-d69c-4664-9c3c-c1c08533190b\"}],\"68a3a36f-7390-425f-8bfa-4ac35c8631ed\":[],\"eaebb854-d460-4048-8042-389d5f9de6b4\":[{\"ID\":108,\"Changefeed\":\"a7646103-d69c-4664-9c3c-c1c08533190b\"}]}"] [2024/04/26 19:30:44.038 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=a7646103-d69c-4664-9c3c-c1c08533190b&target-cp-id=eaebb854-d460-4048-8042-389d5f9de6b4&table-id=110"] [2024/04/26 19:30:44.088 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=110] [2024/04/26 19:30:44.088 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=1ee432cb-7cc5-48f4-8716-fe459d233902] [targetCapture=eaebb854-d460-4048-8042-389d5f9de6b4] check diff successfully table move_table.check2 not exists for 1-th check, retry later The 1 times to try to start tidb cluster... table test.finish_mark not exists for 17-th check, retry later + set +x + tso='449349168229515265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349168229515265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "operationcount"="0" "mysql.host"="127.0.0.1" "readallfields"="true" "requestdistribution"="uniform" "workload"="core" "updateproportion"="0" "mysql.user"="root" "insertproportion"="0" "mysql.port"="4000" "mysql.db"="changefeed_error" "dotransactions"="false" "recordcount"="20" "scanproportion"="0" "threadcount"="4" "readproportion"="0" ********************************************** Run finished, takes 9.103078ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3734.1, Avg(us): 1688, Min(us): 968, Max(us): 3667, 95th(us): 4000, 99th(us): 4000 [Fri Apr 26 19:30:45 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedNoRetryError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.57465748.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/savepoint Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x [Fri Apr 26 19:30:45 CST 2024] <<<<<< START kafka consumer in ddl_only_block_related_table case >>>>>> table move_table.check2 exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... table test.finish_mark exists check diff successfully Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 1-th time... table ddl_only_block_related_table.finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 3-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:48 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 {"id":"234b4bf3-5c13-42ac-ad70-f7a81cd993a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131045} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd087e8 234b4bf3-5c13-42ac-ad70-f7a81cd993a4 /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 {"id":"234b4bf3-5c13-42ac-ad70-f7a81cd993a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131045} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd087e8 234b4bf3-5c13-42ac-ad70-f7a81cd993a4 
/tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 {"id":"234b4bf3-5c13-42ac-ad70-f7a81cd993a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131045} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd087e8 234b4bf3-5c13-42ac-ad70-f7a81cd993a4 /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5804.out cli changefeed create --start-ts=449349168229515265 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error cdc.test: no process found wait process cdc.test exit for 2-th time... process cdc.test already exit [Fri Apr 26 19:30:48 CST 2024] <<<<<< run test case many_pk_or_uk success! >>>>>> Create changefeed successfully! ID: changefeed-error Info: {"upstream_id":7362136722485557923,"namespace":"default","id":"changefeed-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:30:48.573280722+08:00","start_ts":449349168229515265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","res
olved_ts":449349168229515265,"checkpoint_ts":449349168229515265,"checkpoint_time":"2024-04-26 19:30:43.356"} PASS cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Fri Apr 26 19:30:48 CST 2024] <<<<<< run test case move_table success! >>>>>> coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/multi_source Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x [Fri Apr 26 19:30:50 CST 2024] <<<<<< START kafka consumer in changefeed_error case >>>>>> check_changefeed_state http://127.0.0.1:2379 changefeed-error failed [CDC:ErrStartTsBeforeGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=failed + error_msg='[CDC:ErrStartTsBeforeGC]' + tls_dir='[CDC:ErrStartTsBeforeGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:30:48.663796838+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at 449349168229515265" } }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:30:48.663796838+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at 449349168229515265" } }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:30:48.663796838+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at 449349168229515265" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 
7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449349168229515265, '"checkpoint_time":' '"2024-04-26' '19:30:43.356",' '"error":' '{' '"time":' '"2024-04-26T19:30:48.663796838+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at '449349168229515265"' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449349168229515265, '"checkpoint_time":' '"2024-04-26' '19:30:43.356",' '"error":' '{' '"time":' '"2024-04-26T19:30:48.663796838+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at '449349168229515265"' '}' '}' + message='[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at 449349168229515265' + [[ ! [CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449349168229514965 is earlier than or equal to GC safepoint at 449349168229515265 =~ \[CDC:ErrStartTsBeforeGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5928.out cli changefeed resume -c changefeed-error valid ~~~ running cdc Failed to start cdc, the usage tips should be printed 1st test case cdc_server_tips success! try an INVALID cdc server command [Fri Apr 26 19:30:48 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ true != \n\o ]] + set +x + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2547725479.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default --pd None table ddl_only_block_related_table.finish_mark not exists for 2-th check, retry later PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_only_block_related_table.finish_mark not exists for 3-th check, retry later \033[0;36m<<< Run all test success >>>\033[0m + set +x table changefeed_error.usertable not exists for 1-th check, retry later [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // stage [Pipeline] } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:30:52 CST 2024] <<<<<< run test case kafka_messages success! >>>>>> table ddl_only_block_related_table.finish_mark exists table changefeed_error.usertable exists wait process 8590 exit for 1-th time... check diff failed 1-th time, retry later Starting Upstream TiDB... wait process 8590 exit for 2-th time... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8590) - No such process wait process 8590 exit for 3-th time... process 8590 already exit [Fri Apr 26 19:30:55 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.87658767.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh: line 1: 9369 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b0bd2c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:15080, start at 2024-04-26 19:30:54.459488389 +0800 CST m=+5.110763428 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:54.468 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:54.461 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:54.461 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b0bd2c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:15080, start at 2024-04-26 19:30:54.459488389 +0800 CST m=+5.110763428 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:54.468 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:54.461 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:54.461 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. 
Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b0be940006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:15160, start at 2024-04-26 19:30:54.507382655 +0800 CST m=+5.109390882 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:32:54.515 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:30:54.501 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:20:54.501 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
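The cdc server startups logged above (e.g. "START cdc server in ddl_only_block_related_table case") all follow the same readiness pattern: launch cdc.test server in the background, then poll http://127.0.0.1:8300/debug/info with curl until the response contains the "etcd info" section, retrying up to 50 times with a 3-second sleep. A minimal sketch of that loop, with the helper name assumed and the constants taken from the trace:

wait_cdc_ready() {
    # Poll the cdc debug endpoint until it serves the "etcd info" dump; an empty or
    # refused response while the server is still starting simply triggers another attempt.
    local url="http://127.0.0.1:8300/debug/info"
    local i res
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "$url")
        if echo "$res" | grep -q 'failed to get info:'; then
            : # server answered but reported an error; fall through and retry
        elif echo "$res" | grep -q 'etcd info'; then
            return 0
        fi
        if ((i == 50)); then
            echo "cdc server at $url did not become ready" >&2
            return 1
        fi
        sleep 3
    done
}

The "Connection refused" lines after the first probe are therefore expected noise: the loop just waits for the next attempt.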
Logging trace to /tmp/tidb_cdc_test/savepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/savepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff successfully ***************** properties ***************** "readproportion"="0" "mysql.port"="4000" "readallfields"="true" "mysql.db"="changefeed_error" "workload"="core" "threadcount"="4" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "updateproportion"="0" "recordcount"="20" "operationcount"="0" "scanproportion"="0" "mysql.user"="root" "dotransactions"="false" "insertproportion"="0" ********************************************** Run finished, takes 5.021232ms INSERT - Takes(s): 0.0, Count: 20, OPS: 5368.3, Avg(us): 881, Min(us): 438, Max(us): 2452, 95th(us): 3000, 99th(us): 3000 check diff successfully {"id":"234b4bf3-5c13-42ac-ad70-f7a81cd993a4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131045} check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + [[ /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + echo 'check failed' check failed + exit 1 run task failed 1-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16551.out cli tso query --pd=http://127.0.0.1:2379 check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + [[ /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + echo 'check failed' check failed + exit 1 run task failed 2-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:30:58 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0025e4630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172104527882 MinTableBarrierTs:449349172104527882 AdminJobType:noop} span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/55e5c782-1e2c-4db5-92d3-0e064667d4a5 {"id":"55e5c782-1e2c-4db5-92d3-0e064667d4a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131055} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc235a0 55e5c782-1e2c-4db5-92d3-0e064667d4a5 
/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449349172104527882,"min-table-barrier-ts":449349172104527882,"admin-job-type":0} /tidb/cdc/default/default/task/position/55e5c782-1e2c-4db5-92d3-0e064667d4a5/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0025e4630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172104527882 MinTableBarrierTs:449349172104527882 AdminJobType:noop} span: 
{table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/55e5c782-1e2c-4db5-92d3-0e064667d4a5 {"id":"55e5c782-1e2c-4db5-92d3-0e064667d4a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131055} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc235a0 55e5c782-1e2c-4db5-92d3-0e064667d4a5 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"syn+ grep -q 'failed to get info:' 
ced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449349172104527882,"min-table-barrier-ts":449349172104527882,"admin-job-type":0} /tidb/cdc/default/default/task/position/55e5c782-1e2c-4db5-92d3-0e064667d4a5/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0025e4630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172104527882 MinTableBarrierTs:449349172104527882 AdminJobType:noop} span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172104527882, checkpointTs: 449349172104527882, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/55e5c782-1e2c-4db5-92d3-0e064667d4a5 {"id":"55e5c782-1e2c-4db5-92d3-0e064667d4a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131055} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc235a0 55e5c782-1e2c-4db5-92d3-0e064667d4a5 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"syn+ grep -q 'etcd info' ced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449349172104527882,"min-table-barrier-ts":449349172104527882,"admin-job-type":0} /tidb/cdc/default/default/task/position/55e5c782-1e2c-4db5-92d3-0e064667d4a5/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x check_ts_not_forward ddl-only-block-related-table + set +x + tso='449349172058128385 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349172058128385 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:30:59 CST 2024] <<<<<< START cdc server in savepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.1659116593.out server --log-file /tmp/tidb_cdc_test/savepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/savepoint/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... [2024/04/26 19:30:55.575 +08:00] [WARN] [diff.go:182] ["table struct is not equal"] [reason="column num not equal, one is 34 another is 28"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_session_done_during_task/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/autorandom/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
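The tso query step above shows how each case picks a start-ts for its changefeed: run_cdc_cli wraps cdc.test with a coverage profile, so the command output is the TSO followed by the Go test "PASS coverage: ..." line, and only the first whitespace-separated field is kept. A minimal sketch with illustrative variable names:

# Query a TSO from PD through the cdc cli and strip the coverage noise appended by cdc.test.
tso_output=$(cdc.test cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo "$tso_output" | awk -F ' ' '{print $1}')
echo "using start-ts=$start_ts"

The extracted value (449349172058128385 here) is then passed to cli changefeed create as --start-ts, as the savepoint case does a few lines below.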
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + [[ /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/234b4bf3-5c13-42ac-ad70-f7a81cd993a4 + echo 'check failed' check failed + exit 1 run task failed 3-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:02 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d6ac06a-ff9d-42af-aea7-46728daf92b9 {"id":"9d6ac06a-ff9d-42af-aea7-46728daf92b9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131059} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c0b88ce 9d6ac06a-ff9d-42af-aea7-46728daf92b9 /tidb/cdc/default/default/upstream/7362136787716860773 {"id":7362136787716860773,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d6ac06a-ff9d-42af-aea7-46728daf92b9 {"id":"9d6ac06a-ff9d-42af-aea7-46728daf92b9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131059} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c0b88ce 9d6ac06a-ff9d-42af-aea7-46728daf92b9 /tidb/cdc/default/default/upstream/7362136787716860773 {"id":7362136787716860773,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d6ac06a-ff9d-42af-aea7-46728daf92b9 {"id":"9d6ac06a-ff9d-42af-aea7-46728daf92b9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131059} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c0b88ce 9d6ac06a-ff9d-42af-aea7-46728daf92b9 
/tidb/cdc/default/default/upstream/7362136787716860773 {"id":7362136787716860773,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16641.out cli changefeed create --start-ts=449349172058128385 '--sink-uri=kafka://127.0.0.1:9092/ticdc-savepoint-test-21568?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: da77b59a-c439-415a-873a-ca7f82ba3c92 Info: {"upstream_id":7362136787716860773,"namespace":"default","id":"da77b59a-c439-415a-873a-ca7f82ba3c92","sink_uri":"kafka://127.0.0.1:9092/ticdc-savepoint-test-21568?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:02.974296807+08:00","start_ts":449349172058128385,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349172058128385,"checkpoint_ts":449349172058128385,"checkpoint_time":"2024-04-26 19:30:57.961"} PASS start tidb cluster in /tmp/tidb_cdc_test/capture_session_done_during_task Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... coverage: 2.4% of statements in github.com/pingcap/tiflow/... start tidb cluster in /tmp/tidb_cdc_test/autorandom Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b131940003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:6997, start at 2024-04-26 19:31:01.866056746 +0800 CST m=+6.671437737 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:01.881 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:01.861 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:01.861 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b131940003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:6997, start at 2024-04-26 19:31:01.866056746 +0800 CST m=+6.671437737 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:01.881 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:31:01.861 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:01.861 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1336c000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-2x98t-8x2nm, pid:7049, start at 2024-04-26 19:31:02.008980141 +0800 CST m=+6.734434349 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:02.024 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:02.029 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:02.029 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
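With the cluster up, the changefeed itself is created through the cdc cli, as in the savepoint case earlier in this log: the start-ts obtained from the tso query plus a Kafka sink URI carrying the protocol and message-size parameters. A sketch of that invocation (topic name, start-ts and URI parameters copied from the logged savepoint command):

cdc.test cli changefeed create \
    --start-ts=449349172058128385 \
    '--sink-uri=kafka://127.0.0.1:9092/ticdc-savepoint-test-21568?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'

As seen in the output, the savepoint and multi_source feeds get generated UUID ids after "Create changefeed successfully!", while cases such as ddl-only-block-related-table and changefeed-error run under explicit ids.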
Logging trace to /tmp/tidb_cdc_test/multi_source/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_source/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x [Fri Apr 26 19:31:04 CST 2024] <<<<<< START kafka consumer in savepoint case >>>>>> table savepoint.finish_mark not exists for 1-th check, retry later Verifying downstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task failed 1-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8410.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... check_ts_not_forward ddl-only-block-related-table table savepoint.finish_mark not exists for 2-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449349174220816385 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349174220816385 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:31:08 CST 2024] <<<<<< START cdc server in multi_source case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.84348436.out server --log-file /tmp/tidb_cdc_test/multi_source/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_source/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + grep -q 'failed to get info:' + echo '' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 invalid ~~~ running cdc Failed to start cdc, the usage tips should be printed 2nd test case cdc_server_tips success! [Fri Apr 26 19:31:08 CST 2024] <<<<<< run all test cases cdc_server_tips success! >>>>>> table savepoint.finish_mark exists check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info= + [[ '' =~ capture ]] + echo 'check pass' check pass + exit 0 run task successfully check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Fri Apr 26 19:31:08 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.61766178.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:31:10 CST 2024] <<<<<< run test case savepoint success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:11 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 {"id":"20904d0e-c880-4a5e-99e0-ea0fdc9f2c28","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131068} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c1c4cd4 20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 /tidb/cdc/default/default/upstream/7362136810712658672 {"id":7362136810712658672,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 {"id":"20904d0e-c880-4a5e-99e0-ea0fdc9f2c28","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131068} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c1c4cd4 20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 /tidb/cdc/default/default/upstream/7362136810712658672 
{"id":7362136810712658672,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 {"id":"20904d0e-c880-4a5e-99e0-ea0fdc9f2c28","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131068} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c1c4cd4 20904d0e-c880-4a5e-99e0-ea0fdc9f2c28 /tidb/cdc/default/default/upstream/7362136810712658672 {"id":7362136810712658672,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8472.out cli changefeed create --start-ts=449349174220816385 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-source-test-12574?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! ID: ea8d53e1-e307-45ea-a487-1312e898aef5 Info: {"upstream_id":7362136810712658672,"namespace":"default","id":"ea8d53e1-e307-45ea-a487-1312e898aef5","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-source-test-12574?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:11.708464785+08:00","start_ts":449349174220816385,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349174220816385,"checkpoint_ts":449349174220816385,"checkpoint_time":"2024-04-26 19:31:06.211"} PASS coverage: 2.4% of statements in 
github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:11 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dbc9915a-dd17-432b-8121-a9f4cebc44bc {"id":"dbc9915a-dd17-432b-8121-a9f4cebc44bc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131069} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd088d1 dbc9915a-dd17-432b-8121-a9f4cebc44bc /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7362136722485557923,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:48.573280722+08:00","start-ts":449349168229515265,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-04-26T19:31:09.349130535+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable 
error"},"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349175031889928} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449349171493208069,"min-table-barrier-ts":449349171493208069,"admin-job-type":1} /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dbc9915a-dd17-432b-8121-a9f4cebc44bc {"id":"dbc9915a-dd17-432b-8121-a9f4cebc44bc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131069} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd088d1 dbc9915a-dd17-432b-8121-a9f4cebc44bc /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7362136722485557923,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:48.573280722+08:00","start-ts":449349168229515265,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-04-26T19:31:09.349130535+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349175031889928} 
/tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449349171493208069,"min-table-barrier-ts":449349171493208069,"admin-job-type":1} /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dbc9915a-dd17-432b-8121-a9f4cebc44bc {"id":"dbc9915a-dd17-432b-8121-a9f4cebc44bc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131069} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd088d1 dbc9915a-dd17-432b-8121-a9f4cebc44bc /tidb/cdc/default/default/changefeed/info/changefeed-error {"upstream-id":7362136722485557923,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:48.573280722+08:00","start-ts":449349168229515265,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-04-26T19:31:09.349130535+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349175031889928} /tidb/cdc/default/default/changefeed/status/changefeed-error 
{"checkpoint-ts":449349171493208069,"min-table-barrier-ts":449349171493208069,"admin-job-type":1} /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error warning failpoint injected retriable error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=warning + error_msg=failpoint + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449349171493208069, "checkpoint_time": "2024-04-26 19:30:55.806", "error": { "time": "2024-04-26T19:31:09.349130535+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449349171493208069, "checkpoint_time": "2024-04-26 19:30:55.806", "error": { "time": "2024-04-26T19:31:09.349130535+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449349171493208069, "checkpoint_time": "2024-04-26 19:30:55.806", "error": { "time": "2024-04-26T19:31:09.349130535+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } } ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449349171493208069, '"checkpoint_time":' '"2024-04-26' '19:30:55.806",' '"error":' '{' '"time":' '"2024-04-26T19:31:09.349130535+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449349171493208069, '"checkpoint_time":' '"2024-04-26' '19:30:55.806",' '"error":' '{' '"time":' '"2024-04-26T19:31:09.349130535+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .error.message + message='failpoint injected retriable error' + [[ ! failpoint injected retriable error =~ failpoint ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6283.out cli changefeed remove -c changefeed-error ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Changefeed remove successfully. ID: changefeed-error CheckpointTs: 449349171493208069 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. 
Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1b6080014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:9191, start at 2024-04-26 19:31:10.380552571 +0800 CST m=+5.195998744 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.387 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:10.388 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.388 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1b6080014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:9191, start at 2024-04-26 19:31:10.380552571 +0800 CST m=+5.195998744 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.387 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:10.388 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.388 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1b7880007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:9278, start at 2024-04-26 19:31:10.441263967 +0800 CST m=+5.207663052 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.447 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240426-19:31:10.434 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.434 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x [Fri Apr 26 19:31:13 CST 2024] <<<<<< START kafka consumer in multi_source case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading 
google.golang.org/protobuf v1.33.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1bee80006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:10542, start at 2024-04-26 19:31:10.913662766 +0800 CST m=+5.213825324 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.921 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:10.906 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.906 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1bee80006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:10542, start at 2024-04-26 19:31:10.913662766 +0800 CST m=+5.213825324 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.921 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:10.906 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.906 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1bed80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7vksj-r2cpw, pid:10623, start at 2024-04-26 19:31:10.928254293 +0800 CST m=+5.179950806 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:10.934 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:10.902 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:10.902 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } go: downloading golang.org/x/text v0.14.0 + set +x check_no_changefeed 127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f5e80010 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:30520, start at 2024-04-26 19:31:14.442591576 +0800 CST m=+5.203475908 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.449 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.426 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.426 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Fri Apr 26 19:31:15 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1066110663.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f1100017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:20703, start at 2024-04-26 19:31:14.161921753 +0800 CST m=+5.164354001 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.168 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.166 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.166 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f1100017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:20703, start at 2024-04-26 19:31:14.161921753 +0800 CST m=+5.164354001 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.168 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.166 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.166 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f3600007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:20777, start at 2024-04-26 19:31:14.270602811 +0800 CST m=+5.217355127 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.278 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.264 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.264 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
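Each "START cdc server in ... case" block traced in this log is followed by the same readiness probe: curl the capture's /debug/info endpoint, retry up to 50 times with a 3-second sleep, and stop once the response contains 'etcd info'. A minimal standalone sketch of that polling pattern, assuming the default capture address 127.0.0.1:8300 and a helper name (wait_cdc_ready) that is not part of the actual test harness:

#!/usr/bin/env bash
# Poll a TiCDC capture's debug endpoint until it reports etcd info,
# mirroring the retry loop visible in the trace above.
wait_cdc_ready() {
    local addr=${1:-127.0.0.1:8300}   # capture address (assumed default)
    local max_retry=50
    for ((i = 0; i <= max_retry; i++)); do
        # curl's -v output goes to stderr, so res holds only the response body.
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" 2>/dev/null)
        if echo "$res" | grep -q 'etcd info'; then
            echo "capture at ${addr} is ready"
            return 0
        fi
        if ((i == max_retry)); then
            echo "capture at ${addr} did not become ready" >&2
            return 1
        fi
        sleep 3
    done
}

wait_cdc_ready 127.0.0.1:8300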
Logging trace to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 2-th time... [Fri Apr 26 19:31:15 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only_avro case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.1201412016.out server --log-file /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/google/btree v1.1.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading google.golang.org/api v0.170.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/ngaut/sync2 
v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Fri Apr 26 19:31:16 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.64056407.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading 
github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f5e80010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:30520, start at 2024-04-26 19:31:14.442591576 +0800 CST m=+5.203475908 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.449 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.426 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.426 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b1f5c00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:30603, start at 2024-04-26 19:31:14.442864602 +0800 CST m=+5.153065104 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:14.449 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:14.416 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:14.416 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
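The repeated VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps ("bootstrapped", "tidb_server_version", the tikv_gc_* rows) interleaved with "Verifying Downstream TiDB is started..." are rows read from TiDB's mysql.tidb system table while the harness waits for each upstream and downstream TiDB to finish bootstrapping. A minimal sketch of a readiness probe in that spirit; the host, port, and retry budget here are assumptions, not the harness's actual values:

#!/usr/bin/env bash
# Wait for a TiDB instance to finish bootstrapping by querying mysql.tidb,
# the system table whose rows (bootstrapped, tikv_gc_*) appear in the log above.
wait_tidb_bootstrapped() {
    local host=${1:-127.0.0.1} port=${2:-4000}   # assumed TiDB address
    for _ in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' 2>/dev/null \
            | grep -qE '^bootstrapped[[:space:]]+True'; then
            echo "TiDB at ${host}:${port} is bootstrapped"
            return 0
        fi
        sleep 1
    done
    echo "TiDB at ${host}:${port} never became ready" >&2
    return 1
}

wait_tidb_bootstrapped 127.0.0.1 4000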
Logging trace to /tmp/tidb_cdc_test/autorandom/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/autorandom/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.cli.22116.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:18 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d391aa2c-ddd6-4e31-882d-04a62a521a11 {"id":"d391aa2c-ddd6-4e31-882d-04a62a521a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131075} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4380ce d391aa2c-ddd6-4e31-882d-04a62a521a11 /tidb/cdc/default/default/upstream/7362136855482045975 {"id":7362136855482045975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d391aa2c-ddd6-4e31-882d-04a62a521a11 {"id":"d391aa2c-ddd6-4e31-882d-04a62a521a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131075} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4380ce d391aa2c-ddd6-4e31-882d-04a62a521a11 /tidb/cdc/default/default/upstream/7362136855482045975 
{"id":7362136855482045975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d391aa2c-ddd6-4e31-882d-04a62a521a11 {"id":"d391aa2c-ddd6-4e31-882d-04a62a521a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131075} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4380ce d391aa2c-ddd6-4e31-882d-04a62a521a11 /tidb/cdc/default/default/upstream/7362136855482045975 {"id":7362136855482045975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:31:18 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/189c07be-a7b9-4576-947a-87054e8de26d {"id":"189c07be-a7b9-4576-947a-87054e8de26d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4391cc 189c07be-a7b9-4576-947a-87054e8de26d /tidb/cdc/default/default/upstream/7362136852420710661 {"id":7362136852420710661,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/189c07be-a7b9-4576-947a-87054e8de26d {"id":"189c07be-a7b9-4576-947a-87054e8de26d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4391cc 189c07be-a7b9-4576-947a-87054e8de26d /tidb/cdc/default/default/upstream/7362136852420710661 {"id":7362136852420710661,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/189c07be-a7b9-4576-947a-87054e8de26d 
{"id":"189c07be-a7b9-4576-947a-87054e8de26d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c4391cc 189c07be-a7b9-4576-947a-87054e8de26d /tidb/cdc/default/default/upstream/7362136852420710661 {"id":7362136852420710661,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.12075.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:19 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d873f315-126a-4b7f-a692-5b1339399eaa {"id":"d873f315-126a-4b7f-a692-5b1339399eaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd0892e d873f315-126a-4b7f-a692-5b1339399eaa /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d873f315-126a-4b7f-a692-5b1339399eaa {"id":"d873f315-126a-4b7f-a692-5b1339399eaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd0892e d873f315-126a-4b7f-a692-5b1339399eaa /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d873f315-126a-4b7f-a692-5b1339399eaa 
{"id":"d873f315-126a-4b7f-a692-5b1339399eaa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131076} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd0892e d873f315-126a-4b7f-a692-5b1339399eaa /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6464.out cli changefeed create --start-ts=449349168229515265 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-1 Create changefeed successfully! ID: changefeed-error-1 Info: {"upstream_id":7362136722485557923,"namespace":"default","id":"changefeed-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:19.533748602+08:00","start_ts":449349168229515265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349168229515265,"checkpoint_ts":449349168229515265,"checkpoint_time":"2024-04-26 19:30:43.356"} PASS [Fri Apr 26 19:31:19 CST 2024] <<<<<< START cdc server in autorandom case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test 
-test.coverprofile=/tmp/tidb_cdc_test/cov.autorandom.3203332035.out server --log-file /tmp/tidb_cdc_test/autorandom/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/autorandom/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x + tso='449349177302581249 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349177302581249 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:31:19 CST 2024] <<<<<< START cdc server in capture_session_done_during_task case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + (( i = 0 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)' + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.2215522157.out server --log-file /tmp/tidb_cdc_test/capture_session_done_during_task/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/capture_session_done_during_task/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
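[editor's note] The traces above repeat one pattern: start `cdc.test ... server ...`, then loop up to 50 times over `curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info`, treating "failed to get info:" as not-ready and "etcd info" as ready, sleeping 3s between attempts. A minimal sketch of that readiness loop, reconstructed only from the trace (the function name and error handling are assumptions, not the actual tiflow `_utils` helper):

  wait_cdc_ready() {
      # Poll the cdc server's debug endpoint, mirroring the "(( i <= 50 ))" loop in the trace.
      local url="http://127.0.0.1:8300/debug/info"
      local i res
      for ((i = 0; i <= 50; i++)); do
          res=$(curl -vsL --max-time 20 "$url" 2>/dev/null || true)
          if echo "$res" | grep -q 'failed to get info:'; then
              : # server answered but is not ready yet; fall through to sleep and retry
          elif echo "$res" | grep -q 'etcd info'; then
              return 0   # the capture has registered itself in etcd; server is ready
          fi
          if ((i == 50)); then
              echo "cdc server failed to start in time" >&2
              return 1
          fi
          sleep 3
      done
  }

The "Connection refused" attempts in the trace are simply early iterations of this loop before the server binds 127.0.0.1:8300.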
check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 619 0 --:--:-- --:--:-- --:--:-- 623 + info='{"state":"normal","resolved_ts":449349177675350021,"checkpoint_ts":449349177675350021}' + echo '{"state":"normal","resolved_ts":449349177675350021,"checkpoint_ts":449349177675350021}' {"state":"normal","resolved_ts":449349177675350021,"checkpoint_ts":449349177675350021} ++ echo '{"state":"normal","resolved_ts":449349177675350021,"checkpoint_ts":449349177675350021}' ++ jq -r .state + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd + set +x check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed + endpoint=127.0.0.1:8300 + changefeed_id=changefeed-error-1 + expected_state=warning + field=last_warning + error_pattern=ErrExecDDLFailed ++ curl 127.0.0.1:8300/api/v2/changefeeds/changefeed-error-1/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed + set +x 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 252 100 252 0 0 1626 0 --:--:-- --:--:-- --:--:-- 1636 + tso='449349177680068614 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349177680068614 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x + info='{"state":"warning","resolved_ts":449349168609361946,"checkpoint_ts":449349168609361946,"last_warning":{"time":"2024-04-26T19:31:21.06555592+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' + echo '{"state":"warning","resolved_ts":449349168609361946,"checkpoint_ts":449349168609361946,"last_warning":{"time":"2024-04-26T19:31:21.06555592+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.12115.out cli changefeed create --start-ts=449349177680068614 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple&encoding-format=avro' -c simple-handle-key-only-avro --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml {"state":"warning","resolved_ts":449349168609361946,"checkpoint_ts":449349168609361946,"last_warning":{"time":"2024-04-26T19:31:21.06555592+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}} ++ echo '{"state":"warning","resolved_ts":449349168609361946,"checkpoint_ts":449349168609361946,"last_warning":{"time":"2024-04-26T19:31:21.06555592+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ jq -r .last_warning.message ++ echo '{"state":"warning","resolved_ts":449349168609361946,"checkpoint_ts":449349168609361946,"last_warning":{"time":"2024-04-26T19:31:21.06555592+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' + error_msg='[CDC:ErrExecDDLFailed]exec DDL failed' + [[ ! [CDC:ErrExecDDLFailed]exec DDL failed =~ ErrExecDDLFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6524.out cli changefeed remove -c changefeed-error-1 Create changefeed successfully! 
ID: simple-handle-key-only-avro Info: {"upstream_id":7362136852420710661,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple\u0026encoding-format=avro","create_time":"2024-04-26T19:31:21.305479456+08:00","start_ts":449349177680068614,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349177680068614,"checkpoint_ts":449349177680068614,"checkpoint_time":"2024-04-26 19:31:19.407"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... Changefeed remove successfully. ID: changefeed-error-1 CheckpointTs: 449349168609361946 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 769 0 --:--:-- --:--:-- --:--:-- 774 + info='{"state":"normal","resolved_ts":449349178422198276,"checkpoint_ts":449349177806159902}' + echo '{"state":"normal","resolved_ts":449349178422198276,"checkpoint_ts":449349177806159902}' {"state":"normal","resolved_ts":449349178422198276,"checkpoint_ts":449349177806159902} ++ echo '{"state":"normal","resolved_ts":449349178422198276,"checkpoint_ts":449349177806159902}' ++ jq -r .state + state=normal + [[ ! 
normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 2-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh using Sink-Type: kafka... <<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed.toml + rm -rf /tmp/tidb_cdc_test/synced_status + mkdir -p /tmp/tidb_cdc_test/synced_status + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/476a6b47-3c84-49bb-8edd-93cb835faa98 {"id":"476a6b47-3c84-49bb-8edd-93cb835faa98","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c5152ca 476a6b47-3c84-49bb-8edd-93cb835faa98 /tidb/cdc/default/default/upstream/7362136869435054432 {"id":7362136869435054432,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/476a6b47-3c84-49bb-8edd-93cb835faa98 {"id":"476a6b47-3c84-49bb-8edd-93cb835faa98","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c5152ca 476a6b47-3c84-49bb-8edd-93cb835faa98 /tidb/cdc/default/default/upstream/7362136869435054432 {"id":7362136869435054432,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/476a6b47-3c84-49bb-8edd-93cb835faa98 {"id":"476a6b47-3c84-49bb-8edd-93cb835faa98","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c5152ca 476a6b47-3c84-49bb-8edd-93cb835faa98 /tidb/cdc/default/default/upstream/7362136869435054432 {"id":7362136869435054432,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: b32dd596-3f4f-4871-b05f-10b9284c35dc Info: {"upstream_id":7362136869435054432,"namespace":"default","id":"b32dd596-3f4f-4871-b05f-10b9284c35dc","sink_uri":"kafka://127.0.0.1:9092/ticdc-autorandom-test-8072?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:22.835407473+08:00","start_ts":449349178537017348,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349178537017348,"checkpoint_ts":449349178537017348,"checkpoint_time":"2024-04-26 19:31:22.676"} [Fri Apr 26 19:31:22 CST 2024] <<<<<< START kafka consumer in autorandom case >>>>>> table autorandom_test.table_a not exists for 1-th check, retry later + set +x + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b35f7367-70fe-41f7-a724-6c280af26217 {"id":"b35f7367-70fe-41f7-a724-6c280af26217","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c504ff1 b35f7367-70fe-41f7-a724-6c280af26217 /tidb/cdc/default/default/upstream/7362136858498335570 {"id":7362136858498335570,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b35f7367-70fe-41f7-a724-6c280af26217 {"id":"b35f7367-70fe-41f7-a724-6c280af26217","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c504ff1 b35f7367-70fe-41f7-a724-6c280af26217 /tidb/cdc/default/default/upstream/7362136858498335570 {"id":7362136858498335570,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b35f7367-70fe-41f7-a724-6c280af26217 {"id":"b35f7367-70fe-41f7-a724-6c280af26217","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131079} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2c504ff1 b35f7367-70fe-41f7-a724-6c280af26217 /tidb/cdc/default/default/upstream/7362136858498335570 {"id":7362136858498335570,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x start tidb cluster in /tmp/tidb_cdc_test/event_filter Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x [Fri Apr 26 19:31:23 CST 2024] <<<<<< START kafka consumer in capture_session_done_during_task case >>>>>> lease 22318f1a2c504ff1 revoked wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... 
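[editor's note] The `cli tso query` calls traced earlier print the TSO followed by "PASS" and a "coverage: ..." line, because the test binary is coverage-instrumented; the scripts then keep only the first field. A short sketch of that extraction, grounded in the trace (the coverprofile filename here is illustrative, the real names are per-case and per-PID):

  tso=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out cli tso query --pd=http://127.0.0.1:2379)
  # The unquoted $tso collapses the multi-line output onto one line, so awk's
  # first field is the TSO itself and the trailing "PASS coverage: ..." is dropped.
  start_ts=$(echo $tso | awk -F ' ' '{print $1}')

That `start_ts` is what gets passed to `cli changefeed create --start-ts=...` in the subsequent trace lines.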
table capture_session_done_during_task.t exists check diff failed 1-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Fri Apr 26 19:31:25 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectActualGCSafePoint=return(9223372036854775807)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.65816583.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/synced_status Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... table autorandom_test.table_a not exists for 2-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2209 0 --:--:-- --:--:-- --:--:-- 2218 + info='{"state":"warning","resolved_ts":449349179470774276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349179470774276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349179470774276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349179470774276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449349179470774276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .last_warning.message + error_msg='kafka sink injected error' + [[ ! kafka sink injected error =~ kafka ]] run task successfully table autorandom_test.table_a exists check diff successfully check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed check diff failed 2-th time, retry later wait process cdc.test exit for 1-th time... 
0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 1820 0 --:--:-- --:--:-- --:--:-- 1820 100 244 100 244 0 0 1819 0 --:--:-- --:--:-- --:--:-- 1807 + info='{"state":"warning","resolved_ts":449349179732918276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349179732918276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349179732918276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349179732918276,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:28 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e {"id":"ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131085} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd089ac ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e {"id":"ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131085} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd089ac ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e {"id":"ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131085} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd089ac ecc2e2a9-e4fb-48c6-8d0a-c990e044f92e /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6635.out cli changefeed create --start-ts=449349168229515265 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-2 Create changefeed successfully! 
ID: changefeed-error-2 Info: {"upstream_id":7362136722485557923,"namespace":"default","id":"changefeed-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:28.64910921+08:00","start_ts":449349168229515265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349168229515265,"checkpoint_ts":449349168229515265,"checkpoint_time":"2024-04-26 19:30:43.356"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:31:28 CST 2024] <<<<<< run test case autorandom success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
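[editor's note] The changefeed-error steps above rely on failpoint injection: a `path=rule` pair is exported via GO_FAILPOINTS before the instrumented `cdc.test` server starts, which is why changefeed-error-2 immediately fails with ErrSnapshotLostByGC. A condensed sketch of that server start as it appears in the trace (the coverprofile name is shortened here; backgrounding with `&` is assumed so the readiness loop can poll the server):

  # Inject a GC safepoint of MaxInt64 so any checkpoint-ts is "lost by GC".
  export GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectActualGCSafePoint=return(9223372036854775807)'
  cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.out server \
      --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log \
      --log-level debug \
      --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data \
      --cluster-id default &

The later restart of the same case uses a different rule (ChangefeedNewRedoManagerError=2*return(true)) through the same mechanism.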
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 3-th time, retry later + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.12165.out cli changefeed pause -c simple-handle-key-only-avro check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) PASS 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2242 0 --:--:-- --:--:-- --:--:-- 2259 + info='{"state":"warning","resolved_ts":449349180257468420,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349180257468420,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349180257468420,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349180257468420,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 2-th time, retry later coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
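[editor's note] The check_changefeed_status traces above all follow the same shape: curl the v2 status API, extract `.state` with jq, compare it to the expected state, and optionally match `.last_warning.message` against an error pattern. A sketch reconstructed from those traces (the function name matches the log, but this body is an approximation of the tiflow helper, not its source):

  check_changefeed_status() {
      local endpoint=$1 changefeed_id=$2 expected_state=$3 field=$4 error_pattern=$5
      local info state error_msg
      info=$(curl -s "${endpoint}/api/v2/changefeeds/${changefeed_id}/status")
      state=$(echo "$info" | jq -r .state)
      if [[ ! "$state" == "$expected_state" ]]; then
          echo "changefeed state $state does not equal to $expected_state"
          return 1
      fi
      if [[ -n "$field" ]]; then
          error_msg=$(echo "$info" | jq -r ".${field}.message")
          if [[ ! "$error_msg" =~ $error_pattern ]]; then
              echo "error message $error_msg does not match $error_pattern"
              return 1
          fi
      fi
  }

In the log this check runs under a retry wrapper, which is why a non-matching state prints "run task failed N-th time, retry later" and the same curl/jq sequence repeats until the changefeed reaches the expected state.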
+ set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error-2 failed [CDC:ErrSnapshotLostByGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error-2 + expected_state=failed + error_msg='[CDC:ErrSnapshotLostByGC]' + tls_dir='[CDC:ErrSnapshotLostByGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error-2 -s + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:31:28.732046906+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at 9223372036854775807" } }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:31:28.732046906+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at 9223372036854775807" } }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449349168229515265, "checkpoint_time": "2024-04-26 19:30:43.356", "error": { "time": "2024-04-26T19:31:28.732046906+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at 9223372036854775807" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449349168229515265, '"checkpoint_time":' '"2024-04-26' '19:30:43.356",' '"error":' '{' '"time":' '"2024-04-26T19:31:28.732046906+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449349168229515265, '"checkpoint_time":' '"2024-04-26' '19:30:43.356",' '"error":' '{' '"time":' '"2024-04-26T19:31:28.732046906+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' + message='[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at 9223372036854775807' + [[ ! [CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. 
checkpoint-ts 449349168229515265 is earlier than or equal to GC safepoint at 9223372036854775807 =~ \[CDC:ErrSnapshotLostByGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6714.out cli changefeed remove -c changefeed-error-2 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Changefeed remove successfully. ID: changefeed-error-2 CheckpointTs: 449349168229515265 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff successfully + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.12198.out cli changefeed update -c simple-handle-key-only-avro '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple&encoding-format=avro&max-message-bytes=650' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml --no-confirm check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Diff of changefeed config: {Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple&encoding-format=avro To:kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple&encoding-format=avro&max-message-bytes=650} {Type:update Path:[Config SyncPointInterval] From: To:0xc0038ce3c8} {Type:update Path:[Config SyncPointRetention] From: To:0xc0038ce3d8} {Type:update Path:[Config Consistent] From: To:0xc000f924d0} Update changefeed config successfully! ID: simple-handle-key-only-avro Info: 
{"upstream_id":7362136852420710661,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-1498?protocol=simple\u0026encoding-format=avro\u0026max-message-bytes=650","create_time":"2024-04-26T19:31:21.305479456+08:00","start_ts":449349177680068614,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":0,"checkpoint_ts":449349180131115013,"checkpoint_time":"2024-04-26 19:31:28.757"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.12236.out cli changefeed resume -c simple-handle-key-only-avro check diff successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:31:33 CST 2024] <<<<<< run test case capture_session_done_during_task success! 
>>>>>> check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2221 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449349181305782278,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349181305782278,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349181305782278,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349181305782278,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 3-th time, retry later PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:31:34 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.67586760.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b325d8001c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:17796, start at 2024-04-26 19:31:33.920507899 +0800 CST m=+5.151350695 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:33.927 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:33.929 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:33.929 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b325d8001c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:17796, start at 2024-04-26 19:31:33.920507899 +0800 CST m=+5.151350695 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:33.927 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:33.929 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:33.929 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b326780013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-4851m-h3czb, pid:17893, start at 2024-04-26 19:31:33.953575952 +0800 CST m=+5.132910364 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:33.960 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:33.918 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:33.918 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b33114000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:26575, start at 2024-04-26 19:31:34.61217441 +0800 CST m=+5.223688670 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:34.618 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:34.597 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:34.597 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 1-th check, retry later + cd /tmp/tidb_cdc_test/synced_status ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19352.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:37 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89c2705f-6f98-4096-aa75-f80c35a04f8e {"id":"89c2705f-6f98-4096-aa75-f80c35a04f8e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd08a07 89c2705f-6f98-4096-aa75-f80c35a04f8e /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89c2705f-6f98-4096-aa75-f80c35a04f8e {"id":"89c2705f-6f98-4096-aa75-f80c35a04f8e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd08a07 89c2705f-6f98-4096-aa75-f80c35a04f8e /tidb/cdc/default/default/upstream/7362136722485557923 {"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89c2705f-6f98-4096-aa75-f80c35a04f8e {"id":"89c2705f-6f98-4096-aa75-f80c35a04f8e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131094} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bd08a07 89c2705f-6f98-4096-aa75-f80c35a04f8e /tidb/cdc/default/default/upstream/7362136722485557923 
{"id":7362136722485557923,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6811.out cli changefeed create --start-ts=0 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-initialize-error Create changefeed successfully! ID: changefeed-initialize-error Info: {"upstream_id":7362136722485557923,"namespace":"default","id":"changefeed-initialize-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:37.645808314+08:00","start_ts":449349182437720067,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349182437720067,"checkpoint_ts":449349182437720067,"checkpoint_time":"2024-04-26 19:31:37.556"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b33114000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:26575, start at 2024-04-26 19:31:34.61217441 +0800 CST m=+5.223688670 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:34.618 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:34.597 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:34.597 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b331c80004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:26664, start at 2024-04-26 19:31:34.645546477 +0800 CST m=+5.199602603 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:34.653 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:34.642 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:34.642 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
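[editor's note] The "+ (( i <= 50 ))" / "curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info" / "grep -q 'etcd info'" lines traced above are the harness polling a freshly started cdc server until its debug endpoint reports etcd info, sleeping 3 seconds between attempts. A minimal sketch of that polling pattern follows; the function name wait_for_endpoint, its arguments, and the retry budget are illustrative assumptions, not the test framework's actual helper.

# Illustrative sketch only, based on the readiness-polling traces in this log.
wait_for_endpoint() {
    local url=$1 marker=$2 max_retries=${3:-50}
    local i=0 body
    while (( i <= max_retries )); do
        body=$(curl -vsL --max-time 20 "$url" 2>&1)
        # The server is treated as ready once the response contains the marker
        # string (the traces above grep the /debug/info output for 'etcd info').
        if echo "$body" | grep -q "$marker"; then
            return 0
        fi
        if (( i == max_retries )); then
            return 1
        fi
        sleep 3
        i=$(( i + 1 ))
    done
}
# Example with values taken from the log (assumed usage):
# wait_for_endpoint http://127.0.0.1:8300/debug/info 'etcd info'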
Logging trace to /tmp/tidb_cdc_test/event_filter/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/event_filter/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark not exists for 2-th check, retry later + set +x + tso='449349182391320577 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349182391320577 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449349182391320577 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status --binary cdc.test [Fri Apr 26 19:31:38 CST 2024] <<<<<< START cdc server in synced_status case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.1939119393.out server --log-file /tmp/tidb_cdc_test/synced_status/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:31:37.066 +08:00] [WARN] [diff.go:551] ["checksum is not equal"] [table=`test`.`many_cols`] [where="((TRUE) AND TRUE)"] ["source checksum"=2403192267] ["target checksum"=2492131186] ["get source checksum cost"=1.549917ms] ["get target checksum cost"=1.23117ms] [2024/04/26 19:31:37.068 +08:00] [WARN] [diff.go:893] ["find different row"] [column=val] [row1="{ val: 2337, id: 1, }"] [row2="{ id: 1, val: 2271, }"] [2024/04/26 19:31:37.157 +08:00] [WARN] [diff.go:745] ["rows is not equal"] [table=`test`.`many_cols`] [where="((TRUE) AND TRUE)"] [cost=90.760261ms] [2024/04/26 19:31:37.159 +08:00] [WARN] [diff.go:384] ["check chunk data not equal"] [chunk="{\"id\":0,\"bounds\":[],\"where\":\"((TRUE) AND TRUE)\",\"args\":null,\"state\":\"failed\"}"] + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449349182437720067, "checkpoint_time": "2024-04-26 19:31:37.556", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449349182437720067, "checkpoint_time": "2024-04-26 19:31:37.556", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449349182437720067, "checkpoint_time": "2024-04-26 19:31:37.556", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449349182437720067, '"checkpoint_time":' '"2024-04-26' '19:31:37.556",' '"error":' '{' '"time":' '"2024-04-26T19:31:37.828153666+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later [Fri Apr 26 19:31:39 CST 2024] <<<<<< START cdc server in event_filter case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.2806728069.out server --log-file /tmp/tidb_cdc_test/event_filter/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/event_filter/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.finish_mark exists check diff failed 1-th time, retry later check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2221 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449349182878908422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349182878908422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349182878908422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349182878908422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 4-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/generate_column/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
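[editor's note] The check_changefeed_status and check_changefeed_state traces above and below all follow one pattern: fetch the changefeed through the v2 HTTP API (or `cdc cli changefeed query`), extract .state with jq, and exit non-zero when it differs from the expected state so the caller can retry ("run task failed N-th time, retry later"). A minimal sketch of that assertion, assuming jq is available; the function name is illustrative, not the harness's real check_changefeed_status implementation.

# Illustrative sketch of the state assertion seen in the traces.
check_state() {
    local endpoint=$1 changefeed_id=$2 expected_state=$3
    local info state
    # Endpoint path copied from the log: /api/v2/changefeeds/<id>/status
    info=$(curl -s "${endpoint}/api/v2/changefeeds/${changefeed_id}/status")
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        return 1
    fi
}
# Example with values taken from the log above:
# check_state 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal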
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null } ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6950.out cli changefeed pause -c changefeed-initialize-error PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:41 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ba2cd112-d456-476c-a8eb-a100dedee475 {"id":"ba2cd112-d456-476c-a8eb-a100dedee475","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca775cd ba2cd112-d456-476c-a8eb-a100dedee475 /tidb/cdc/default/default/upstream/7362136951220773788 {"id":7362136951220773788,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ba2cd112-d456-476c-a8eb-a100dedee475 {"id":"ba2cd112-d456-476c-a8eb-a100dedee475","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca775cd ba2cd112-d456-476c-a8eb-a100dedee475 /tidb/cdc/default/default/upstream/7362136951220773788 {"id":7362136951220773788,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ba2cd112-d456-476c-a8eb-a100dedee475 {"id":"ba2cd112-d456-476c-a8eb-a100dedee475","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131099} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca775cd ba2cd112-d456-476c-a8eb-a100dedee475 /tidb/cdc/default/default/upstream/7362136951220773788 {"id":7362136951220773788,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449349182391320577 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19445.out cli changefeed create --start-ts=449349182391320577 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml check diff failed 2-th time, retry later Create changefeed successfully! ID: test-1 Info: {"upstream_id":7362136951220773788,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-04-26T19:31:42.366675906+08:00","start_ts":449349182391320577,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349182391320577,"checkpoint_ts":449349182391320577,"checkpoint_time":"2024-04-26 19:31:37.379"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:31:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09d8c363-f9dc-4c2f-ab30-a979d1f83c55 {"id":"09d8c363-f9dc-4c2f-ab30-a979d1f83c55","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131100} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca01cce 09d8c363-f9dc-4c2f-ab30-a979d1f83c55 /tidb/cdc/default/default/upstream/7362136956514520988 {"id":7362136956514520988,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09d8c363-f9dc-4c2f-ab30-a979d1f83c55 {"id":"09d8c363-f9dc-4c2f-ab30-a979d1f83c55","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131100} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca01cce 09d8c363-f9dc-4c2f-ab30-a979d1f83c55 /tidb/cdc/default/default/upstream/7362136956514520988 {"id":7362136956514520988,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09d8c363-f9dc-4c2f-ab30-a979d1f83c55 {"id":"09d8c363-f9dc-4c2f-ab30-a979d1f83c55","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131100} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2ca01cce 09d8c363-f9dc-4c2f-ab30-a979d1f83c55 /tidb/cdc/default/default/upstream/7362136956514520988 {"id":7362136956514520988,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.cli.28128.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-event-filter-12022?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8300 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/conf/cf.toml Create changefeed successfully! 
ID: c1694c6e-4862-4bc4-af6a-527abe08bc53 Info: {"upstream_id":7362136956514520988,"namespace":"default","id":"c1694c6e-4862-4bc4-af6a-527abe08bc53","sink_uri":"kafka://127.0.0.1:9092/ticdc-event-filter-12022?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:31:43.274505938+08:00","start_ts":449349183903367171,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["event_filter.*"],"event_filters":[{"matcher":["event_filter.t1"],"ignore_event":["drop table","delete"],"ignore_sql":null,"ignore_insert_value_expr":"id = 2 or city = 'tokyo'","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_truncate"],"ignore_event":["truncate table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_alter"],"ignore_event":["alter table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""}]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349183903367171,"checkpoint_ts":449349183903367171,"checkpoint_time":"2024-04-26 19:31:43.147"} PASS + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error stopped changefeed new redo manager injected error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=stopped + error_msg=changefeed + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s coverage: 2.5% of statements in github.com/pingcap/tiflow/... start tidb cluster in /tmp/tidb_cdc_test/generate_column Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": { "time": "2024-04-26T19:31:37.828153666+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' '{' '"time":' '"2024-04-26T19:31:37.828153666+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' '{' '"time":' '"2024-04-26T19:31:37.828153666+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .error.message + message='changefeed new redo manager injected error' + [[ ! 
changefeed new redo manager injected error =~ changefeed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7034.out cli changefeed resume -c changefeed-initialize-error + set +x ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 2954 0 --:--:-- --:--:-- --:--:-- 2986 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-04-26 19:31:37.379","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-04-26 19:31:43.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:31:37.379","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:31:43.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:31:37.379","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:31:43.000","info":"Data' syncing is 'finished"}' ++ jq -r .sink_checkpoint_ts + sink_checkpoint_ts='2024-04-26 19:31:37.379' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:31:37.379","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:31:43.000","info":"Data' syncing is 'finished"}' ++ jq -r .puller_resolved_ts + puller_resolved_ts='1970-01-01 08:00:00.000' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-04-26' '19:31:37.379","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-04-26' '19:31:43.000","info":"Data' syncing is 'finished"}' ++ jq -r .last_synced_ts + last_synced_ts='1970-01-01 08:00:00.000' + '[' true '!=' true ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' ++ date '+%Y-%m-%d %H:%M:%S' + current='2024-04-26 19:31:44' + echo 'sink_checkpoint_ts is 2024-04-26' 19:31:37.379 sink_checkpoint_ts is 2024-04-26 19:31:37.379 ++ date -d '2024-04-26 19:31:37.379' +%s + checkpoint_timestamp=1714131097 ++ date -d '2024-04-26 19:31:44' +%s + current_timestamp=1714131104 + '[' 7 -gt 300 ']' + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later check diff failed 3-th time, retry later PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... 
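[editor's note] The synced_status trace above queries /api/v2/changefeeds/test-1/synced and then picks the fields .synced, .sink_checkpoint_ts, .puller_resolved_ts and .last_synced_ts out of the JSON with jq, converting timestamps with `date -d` before comparing them against the current time. A condensed sketch of that query-and-parse step, assuming the same endpoint and changefeed id as in the log; this is not the test script itself.

# Illustrative sketch of the synced-status query seen in the trace.
synced=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced" | jq .synced)
sink_checkpoint_ts=$(echo "$synced" | jq -r .sink_checkpoint_ts)
last_synced_ts=$(echo "$synced" | jq -r .last_synced_ts)
# Convert the sink checkpoint to epoch seconds, as the trace does with `date -d`,
# so the lag against the current wall clock can be checked.
checkpoint_timestamp=$(date -d "$sink_checkpoint_ts" +%s)
current_timestamp=$(date +%s)
lag=$(( current_timestamp - checkpoint_timestamp ))
echo "synced=$status checkpoint_lag=${lag}s last_synced_ts=$last_synced_ts"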
+ set +x [Fri Apr 26 19:31:44 CST 2024] <<<<<< START kafka consumer in event_filter case >>>>>> table event_filter.t1 does not exists table event_filter.t1 not exists for 1-th check, retry later + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null }' + echo '{ "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null }' { "upstream_id": 7362136722485557923, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449349183302533124, "checkpoint_time": "2024-04-26 19:31:40.855", "error": null } ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362136722485557923, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449349183302533124, '"checkpoint_time":' '"2024-04-26' '19:31:40.855",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7140.out cli changefeed remove -c changefeed-initialize-error =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_attributes/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table test.t1 exists + sleep 5 Changefeed remove successfully. ID: changefeed-initialize-error CheckpointTs: 449349184613515268 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-17637?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff failed 4-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table event_filter.t1 exists table event_filter.t_normal not exists for 1-th check, retry later + set +x check_changefeed_status 127.0.0.1:8300 fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal + endpoint=127.0.0.1:8300 + changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2230 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449349184976060422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449349184976060422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449349184976060422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449349184976060422,"checkpoint_ts":449349177806159902,"last_warning":{"time":"2024-04-26T19:31:22.833012451+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 5-th time, retry later cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:31:48 CST 2024] <<<<<< run test case changefeed_error success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/ddl_attributes Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... check diff failed 5-th time, retry later table event_filter.t_normal exists table event_filter.t_truncate not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table event_filter.t_truncate exists table event_filter.t_alter not exists for 1-th check, retry later check diff failed 6-th time, retry later ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 1906 0 --:--:-- --:--:-- --:--:-- 1898 100 243 100 243 0 0 1905 0 --:--:-- --:--:-- --:--:-- 1898 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-04-26 19:31:50.829","puller_resolved_ts":"2024-04-26 19:31:44.028","last_synced_ts":"2024-04-26 19:31:44.129","now_ts":"2024-04-26 19:31:51.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:31:50.829","puller_resolved_ts":"2024-04-26' '19:31:44.028","last_synced_ts":"2024-04-26' '19:31:44.129","now_ts":"2024-04-26' '19:31:51.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:31:50.829","puller_resolved_ts":"2024-04-26' '19:31:44.028","last_synced_ts":"2024-04-26' '19:31:44.129","now_ts":"2024-04-26' '19:31:51.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table event_filter.t_alter exists check diff failed 7-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table event_filter.finish_mark exists check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b46190001e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:33279, start at 2024-04-26 19:31:54.12877765 +0800 CST m=+5.156535403 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:54.135 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:54.135 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:54.135 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b46190001e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:33279, start at 2024-04-26 19:31:54.12877765 +0800 CST m=+5.156535403 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:54.135 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:54.135 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:54.135 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b462f40002 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z9nq8-r4p88, pid:33361, start at 2024-04-26 19:31:54.1732895 +0800 CST m=+5.150945772 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:54.180 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:54.173 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:54.173 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/generate_column/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/generate_column/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 8-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 2-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.34749.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 9-th time, retry later check_changefeed_status 127.0.0.1:8300 
fa0ae7fc-e422-4fe6-a09a-73997c17f90c normal
+ endpoint=127.0.0.1:8300
+ changefeed_id=fa0ae7fc-e422-4fe6-a09a-73997c17f90c
+ expected_state=normal
+ field=
+ error_pattern=
++ curl 127.0.0.1:8300/api/v2/changefeeds/fa0ae7fc-e422-4fe6-a09a-73997c17f90c/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    86  100    86    0     0    777      0 --:--:-- --:--:-- --:--:--   781
+ info='{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}'
+ echo '{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}'
{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}
++ echo '{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
+ [[ -z '' ]]
++ echo '{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}'
++ jq -r .last_error
+ error_msg=null
+ [[ ! null == \n\u\l\l ]]
++ echo '{"state":"normal","resolved_ts":449349187597238280,"checkpoint_ts":449349187597238280}'
++ jq -r .last_warning
+ error_msg=null
+ [[ ! null == \n\u\l\l ]]
+ exit 0
run task successfully
table kafka_sink_error_resume.t1 exists
table kafka_sink_error_resume.t2 exists
check diff successfully
check diff failed 1-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
+ set +x
+ tso='449349187688202241 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449349187688202241 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Fri Apr 26 19:31:59 CST 2024] <<<<<< START cdc server in generate_column case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.3478634788.out server --log-file /tmp/tidb_cdc_test/generate_column/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/generate_column/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
* Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
check diff failed 10-th time, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Fri Apr 26 19:32:00 CST 2024] <<<<<< run test case event_filter success! >>>>>>
VARIABLE_NAME VARIABLE_VALUE COMMENT
bootstrapped True Bootstrap flag. Do not delete.
tidb_server_version 196 Bootstrap version. Do not delete.
system_tz Asia/Shanghai TiDB Global System Timezone.
new_collation_enabled True If the new collations are enabled. Do not edit it.
ddl_table_version 3 DDL Table Version. Do not delete.
tikv_gc_leader_uuid 63c68b4b9880011 Current GC worker leader UUID.
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:23609, start at 2024-04-26 19:31:59.732987059 +0800 CST m=+5.279927639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:59.741 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:59.714 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:59.714 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_sequence/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:32:02 CST 2024] <<<<<< run test case kafka_sink_error_resume success! >>>>>> check diff failed 11-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:02 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5172ec2e-8b94-4165-8c2a-e191cb7d0969 {"id":"5172ec2e-8b94-4165-8c2a-e191cb7d0969","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131119} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2cee93ce 5172ec2e-8b94-4165-8c2a-e191cb7d0969 /tidb/cdc/default/default/upstream/7362137041106578631 {"id":7362137041106578631,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5172ec2e-8b94-4165-8c2a-e191cb7d0969 {"id":"5172ec2e-8b94-4165-8c2a-e191cb7d0969","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131119} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2cee93ce 5172ec2e-8b94-4165-8c2a-e191cb7d0969 /tidb/cdc/default/default/upstream/7362137041106578631 
{"id":7362137041106578631,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5172ec2e-8b94-4165-8c2a-e191cb7d0969 {"id":"5172ec2e-8b94-4165-8c2a-e191cb7d0969","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131119} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2cee93ce 5172ec2e-8b94-4165-8c2a-e191cb7d0969 /tidb/cdc/default/default/upstream/7362137041106578631 {"id":7362137041106578631,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.34832.out cli changefeed create --start-ts=449349187688202241 '--sink-uri=kafka://127.0.0.1:9092/ticdc-generate-column-test-1587?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b4b9880011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:23609, start at 2024-04-26 19:31:59.732987059 +0800 CST m=+5.279927639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:59.741 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:59.714 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:59.714 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b4b8900015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-tmlzt-m5s5x, pid:23691, start at 2024-04-26 19:31:59.694041713 +0800 CST m=+5.190633821 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:33:59.700 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:31:59.703 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:21:59.703 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Create changefeed successfully! 
ID: bf1f867d-0dd5-40b5-9937-7fe463d0ebc2 Info: {"upstream_id":7362137041106578631,"namespace":"default","id":"bf1f867d-0dd5-40b5-9937-7fe463d0ebc2","sink_uri":"kafka://127.0.0.1:9092/ticdc-generate-column-test-1587?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:32:02.600555933+08:00","start_ts":449349187688202241,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349187688202241,"checkpoint_ts":449349187688202241,"checkpoint_time":"2024-04-26 19:31:57.585"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... start tidb cluster in /tmp/tidb_cdc_test/ddl_sequence Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
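The changefeed creation traced above follows the usual cdc CLI pattern in these tests: query a start TSO from PD, then create a changefeed whose Kafka sink URI carries the options seen in the log (protocol=open-protocol, partition-num, kafka-version, max-message-bytes). A minimal sketch of that pattern, assuming a plain cdc binary rather than the coverage-instrumented cdc.test, and an illustrative topic name in place of the test's generated one:

# Sketch only: binary name and topic are illustrative; the URI parameters are the ones from the trace.
pd_addr="http://127.0.0.1:2379"
start_ts=$(cdc cli tso query --pd="$pd_addr" | awk 'NR==1 {print $1}')
cdc cli changefeed create \
  --start-ts="$start_ts" \
  --sink-uri="kafka://127.0.0.1:9092/example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"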
+ set +x [Fri Apr 26 19:32:04 CST 2024] <<<<<< START kafka consumer in generate_column case >>>>>> check diff failed 12-th time, retry later table generate_column.t not exists for 1-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.25086.out cli tso query --pd=http://127.0.0.1:2379 [2024/04/26 19:32:03.208 +08:00] [WARN] [diff.go:551] ["checksum is not equal"] [table=`test`.`base_for_view`] [where="((TRUE) AND TRUE)"] ["source checksum"=976018642] ["target checksum"=4181325105] ["get source checksum cost"=1.790488ms] ["get target checksum cost"=1.728331ms] [2024/04/26 19:32:03.210 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 1, user_id: 0, amount: 0, }"] [2024/04/26 19:32:03.297 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 2, user_id: 0, amount: 10, }"] [2024/04/26 19:32:03.397 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ user_id: 2, amount: 2, id: 7, }"] [2024/04/26 19:32:03.497 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 8, user_id: 2, amount: 12, }"] [2024/04/26 19:32:03.597 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 13, user_id: 4, amount: 4, }"] [2024/04/26 19:32:03.698 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 14, user_id: 4, amount: 14, }"] [2024/04/26 19:32:03.798 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 19, user_id: 6, amount: 6, }"] [2024/04/26 19:32:03.898 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 20, user_id: 6, amount: 16, }"] [2024/04/26 19:32:03.998 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 25, user_id: 8, amount: 8, }"] [2024/04/26 19:32:04.098 +08:00] [WARN] [diff.go:895] ["target had superfluous data"] [row="{ id: 26, user_id: 8, amount: 18, }"] [2024/04/26 19:32:04.199 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 31, user_id: 10, amount: 1111, }"] [row2="{ amount: 10, id: 31, user_id: 10, }"] [2024/04/26 19:32:04.299 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 32, user_id: 10, amount: 1111, }"] [row2="{ id: 32, user_id: 10, amount: 20, }"] [2024/04/26 19:32:04.399 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 37, user_id: 12, amount: 1111, }"] [row2="{ id: 37, user_id: 12, amount: 12, }"] [2024/04/26 19:32:04.499 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 38, user_id: 12, amount: 1111, }"] [row2="{ user_id: 12, amount: 22, id: 38, }"] [2024/04/26 19:32:04.599 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 43, user_id: 14, amount: 1111, }"] [row2="{ user_id: 14, amount: 14, id: 43, }"] [2024/04/26 19:32:04.699 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ user_id: 14, amount: 1111, id: 44, }"] [row2="{ id: 44, user_id: 14, amount: 24, }"] [2024/04/26 19:32:04.800 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 49, user_id: 16, amount: 1111, }"] [row2="{ id: 49, user_id: 16, amount: 16, }"] [2024/04/26 19:32:04.900 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ user_id: 16, amount: 1111, id: 50, }"] [row2="{ id: 50, user_id: 16, amount: 26, }"] [2024/04/26 19:32:05.000 +08:00] 
[WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ user_id: 18, amount: 1111, id: 55, }"] [row2="{ id: 55, user_id: 18, amount: 18, }"] [2024/04/26 19:32:05.100 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 56, user_id: 18, amount: 1111, }"] [row2="{ id: 56, user_id: 18, amount: 28, }"] [2024/04/26 19:32:05.201 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 61, user_id: 20, amount: 1111, }"] [row2="{ id: 61, user_id: 20, amount: 20, }"] [2024/04/26 19:32:05.301 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 62, user_id: 20, amount: 1111, }"] [row2="{ id: 62, user_id: 20, amount: 30, }"] [2024/04/26 19:32:05.401 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ user_id: 22, amount: 1111, id: 67, }"] [row2="{ id: 67, user_id: 22, amount: 22, }"] [2024/04/26 19:32:05.502 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 68, user_id: 22, amount: 1111, }"] [row2="{ amount: 32, id: 68, user_id: 22, }"] [2024/04/26 19:32:05.602 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 73, user_id: 24, amount: 1111, }"] [row2="{ id: 73, user_id: 24, amount: 24, }"] [2024/04/26 19:32:05.702 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 74, user_id: 24, amount: 1111, }"] [row2="{ amount: 34, id: 74, user_id: 24, }"] [2024/04/26 19:32:05.802 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 79, user_id: 26, amount: 1111, }"] [row2="{ user_id: 26, amount: 26, id: 79, }"] [2024/04/26 19:32:05.902 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 80, user_id: 26, amount: 1111, }"] [row2="{ id: 80, user_id: 26, amount: 36, }"] [2024/04/26 19:32:06.002 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 85, user_id: 28, amount: 1111, }"] [row2="{ id: 85, user_id: 28, amount: 28, }"] check diff failed 13-th time, retry later [2024/04/26 19:32:06.103 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 86, user_id: 28, amount: 1111, }"] [row2="{ id: 86, user_id: 28, amount: 38, }"] [2024/04/26 19:32:06.203 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 91, user_id: 30, amount: 1111, }"] [row2="{ id: 91, user_id: 30, amount: 30, }"] [2024/04/26 19:32:06.303 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ amount: 1111, id: 92, user_id: 30, }"] [row2="{ amount: 40, id: 92, user_id: 30, }"] + set +x + tso='449349189648777217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349189648777217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
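The tso lines right above (and the awk call just below) show how the scripts turn the output of the coverage-instrumented cli tso query into a usable start timestamp: the binary prints the TSO followed by a "PASS coverage" summary, and only the first whitespace-separated field is kept. A minimal sketch of that extraction, reusing the value from the trace purely as an example:

# Strip the trailing coverage summary and keep the numeric TSO (value copied from the trace above).
raw='449349189648777217 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
start_ts=$(echo "$raw" | awk -F ' ' '{print $1}')
echo "$start_ts"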
+ awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:32:06 CST 2024] <<<<<< START cdc server in ddl_attributes case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.2512325125.out server --log-file /tmp/tidb_cdc_test/ddl_attributes/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_attributes/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/04/26 19:32:06.403 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ amount: 1111, id: 97, user_id: 32, }"] [row2="{ user_id: 32, amount: 32, id: 97, }"] [2024/04/26 19:32:06.504 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 98, user_id: 32, amount: 1111, }"] [row2="{ id: 98, user_id: 32, amount: 42, }"] [2024/04/26 19:32:06.604 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 103, user_id: 34, amount: 1111, }"] [row2="{ id: 103, user_id: 34, amount: 34, }"] [2024/04/26 19:32:06.704 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ amount: 1111, id: 104, user_id: 34, }"] [row2="{ id: 104, user_id: 34, amount: 44, }"] [2024/04/26 19:32:06.804 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ amount: 1111, id: 109, user_id: 36, }"] [row2="{ id: 109, user_id: 36, amount: 36, }"] table generate_column.t not exists for 2-th check, retry later [2024/04/26 19:32:06.904 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 110, user_id: 36, amount: 1111, }"] [row2="{ id: 110, user_id: 36, amount: 46, }"] [2024/04/26 19:32:07.005 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 115, user_id: 38, amount: 1111, }"] [row2="{ id: 115, user_id: 38, amount: 38, }"] [2024/04/26 19:32:07.105 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 116, user_id: 38, amount: 1111, }"] [row2="{ id: 116, user_id: 38, amount: 48, }"] [2024/04/26 19:32:07.205 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 121, user_id: 40, amount: 1111, }"] [row2="{ id: 121, user_id: 40, amount: 40, }"] [2024/04/26 19:32:07.305 +08:00] [WARN] [diff.go:893] ["find different row"] [column=amount] [row1="{ id: 122, user_id: 40, amount: 1111, }"] [row2="{ id: 122, user_id: 40, amount: 50, }"] [2024/04/26 19:32:07.405 +08:00] [WARN] [diff.go:745] ["rows is not equal"] [table=`test`.`base_for_view`] [where="((TRUE) AND TRUE)"] [cost=4.197499461s] [2024/04/26 19:32:07.407 +08:00] [WARN] [diff.go:384] ["check chunk data not equal"] [chunk="{\"id\":0,\"bounds\":[],\"where\":\"((TRUE) AND TRUE)\",\"args\":null,\"state\":\"failed\"}"] Verifying downstream PD is started... Starting Upstream TiKV... 
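The "START cdc server" traces in this stretch all follow the same readiness check: launch cdc.test in the background, then poll http://127.0.0.1:8300/debug/info until the response contains the "etcd info" dump, retrying up to 50 times with a 3-second sleep. A condensed sketch of that loop, with the address and limits taken from the trace (the surrounding helper functions belong to the test utilities and are not reproduced here):

# Condensed readiness loop; address, retry count, and sleep taken from the log.
for i in $(seq 1 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info || true)
  if echo "$res" | grep -q 'etcd info'; then
    echo 'cdc server is ready'
    break
  fi
  if [ "$i" -eq 50 ]; then
    echo 'cdc server failed to start in time'
    exit 1
  fi
  sleep 3
done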
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 14-th time, retry later table generate_column.t exists table generate_column.t1 exists check diff failed 1-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:09 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/df3ea48c-cd0a-47b9-a05a-6efa00e73735 {"id":"df3ea48c-cd0a-47b9-a05a-6efa00e73735","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131126} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d03fcd3 df3ea48c-cd0a-47b9-a05a-6efa00e73735 /tidb/cdc/default/default/upstream/7362137066793411577 {"id":7362137066793411577,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/df3ea48c-cd0a-47b9-a05a-6efa00e73735 {"id":"df3ea48c-cd0a-47b9-a05a-6efa00e73735","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131126} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d03fcd3 df3ea48c-cd0a-47b9-a05a-6efa00e73735 /tidb/cdc/default/default/upstream/7362137066793411577 {"id":7362137066793411577,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/df3ea48c-cd0a-47b9-a05a-6efa00e73735 {"id":"df3ea48c-cd0a-47b9-a05a-6efa00e73735","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131126} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d03fcd3 df3ea48c-cd0a-47b9-a05a-6efa00e73735 /tidb/cdc/default/default/upstream/7362137066793411577 {"id":7362137066793411577,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.25183.out cli changefeed create --start-ts=449349189648777217 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-5714?protocol=open-protocol&partition-num=4&kafka-version=2.4.1' Create changefeed successfully! 
ID: a3fc66a0-889f-4afc-95ad-6e6eb6cbd37d Info: {"upstream_id":7362137066793411577,"namespace":"default","id":"a3fc66a0-889f-4afc-95ad-6e6eb6cbd37d","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-5714?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1","create_time":"2024-04-26T19:32:10.10046719+08:00","start_ts":449349189648777217,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349189648777217,"checkpoint_ts":449349189648777217,"checkpoint_time":"2024-04-26 19:32:05.064"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 15-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... + set +x [Fri Apr 26 19:32:11 CST 2024] <<<<<< START kafka consumer in ddl_attributes case >>>>>> wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:32:12 CST 2024] <<<<<< run test case generate_column success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 16-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh using Sink-Type: kafka... 
<<=================
+++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh
++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
++ pwd
+ CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
+ source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/../_utils/test_prepare
++ UP_TIDB_HOST=127.0.0.1
++ UP_TIDB_PORT=4000
++ UP_TIDB_OTHER_PORT=4001
++ UP_TIDB_STATUS=10080
++ UP_TIDB_OTHER_STATUS=10081
++ DOWN_TIDB_HOST=127.0.0.1
++ DOWN_TIDB_PORT=3306
++ DOWN_TIDB_STATUS=20080
++ TLS_TIDB_HOST=127.0.0.1
++ TLS_TIDB_PORT=3307
++ TLS_TIDB_STATUS=30080
++ UP_PD_HOST_1=127.0.0.1
++ UP_PD_PORT_1=2379
++ UP_PD_PEER_PORT_1=2380
++ UP_PD_HOST_2=127.0.0.1
++ UP_PD_PORT_2=2679
++ UP_PD_PEER_PORT_2=2680
++ UP_PD_HOST_3=127.0.0.1
++ UP_PD_PORT_3=2779
++ UP_PD_PEER_PORT_3=2780
++ DOWN_PD_HOST=127.0.0.1
++ DOWN_PD_PORT=2479
++ DOWN_PD_PEER_PORT=2480
++ TLS_PD_HOST=127.0.0.1
++ TLS_PD_PORT=2579
++ TLS_PD_PEER_PORT=2580
++ UP_TIKV_HOST_1=127.0.0.1
++ UP_TIKV_PORT_1=20160
++ UP_TIKV_STATUS_PORT_1=20181
++ UP_TIKV_HOST_2=127.0.0.1
++ UP_TIKV_PORT_2=20161
++ UP_TIKV_STATUS_PORT_2=20182
++ UP_TIKV_HOST_3=127.0.0.1
++ UP_TIKV_PORT_3=20162
++ UP_TIKV_STATUS_PORT_3=20183
++ DOWN_TIKV_HOST=127.0.0.1
++ DOWN_TIKV_PORT=21160
++ DOWN_TIKV_STATUS_PORT=21180
++ TLS_TIKV_HOST=127.0.0.1
++ TLS_TIKV_PORT=22160
++ TLS_TIKV_STATUS_PORT=22180
+++ cat /tmp/tidb_cdc_test/KAFKA_VERSION
+++ echo 2.4.1
++ KAFKA_VERSION=2.4.1
+ WORK_DIR=/tmp/tidb_cdc_test/sql_mode
+ CDC_BINARY=cdc.test
+ SINK_TYPE=kafka
+ CDC_COUNT=3
+ DB_COUNT=4
+ rm -rf /tmp/tidb_cdc_test/sql_mode
+ mkdir -p /tmp/tidb_cdc_test/sql_mode
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/sql_mode
The 1 times to try to start tidb cluster...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
pass check, checkpoint tso not forward after 10s
run task successfully
wait process 8770 exit for 1-th time...
wait process 8770 exit for 2-th time...
wait process 8770 exit for 3-th time...
wait process 8770 exit for 4-th time...
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8770) - No such process
wait process 8770 exit for 5-th time...
process 8770 already exit
[Fri Apr 26 19:32:08 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.95039505.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
* Trying 127.0.0.1...
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:11 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00154fc20 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172288028686 MinTableBarrierTs:449349190992265223 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/748f6965-3fc0-4c9c-883f-95bea8eb4fd8 {"id":"748f6965-3fc0-4c9c-883f-95bea8eb4fd8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131128} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc237cc 748f6965-3fc0-4c9c-883f-95bea8eb4fd8 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449349172288028686,"min-table-barrier-ts":449349191254409224,"admin-job-type":0} /tidb/cdc/default/default/task/position/748f6965-3fc0-4c9c-883f-95bea8eb4fd8/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00154fc20 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172288028686 MinTableBarrierTs:449349190992265223 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172288028686, 
checkpointTs: 449349172288028686, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/748f6965-3fc0-4c9c-883f-95bea8eb4fd8 {"id":"748f6965-3fc0-4c9c-883f-95bea8eb4fd8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131128} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc237cc 748f6965-3fc0-4c9c-883f-95bea8eb4fd8 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-chec+ grep -q 'failed to get info:' k-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table 
{"checkpoint-ts":449349172288028686,"min-table-barrier-ts":449349191254409224,"admin-job-type":0} /tidb/cdc/default/default/task/position/748f6965-3fc0-4c9c-883f-95bea8eb4fd8/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7362136707987132970 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-04-26 19:30:43.972484915 +0800 CST StartTs:449349168355868674 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00154fc20 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-20-g16f5d59f9 Epoch:449349168382083077} {CheckpointTs:449349172288028686 MinTableBarrierTs:449349190992265223 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449349172288028686, checkpointTs: 449349172288028686, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/748f6965-3fc0-4c9c-883f-95bea8eb4fd8 {"id":"748f6965-3fc0-4c9c-883f-95bea8eb4fd8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131128} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2bc237cc 748f6965-3fc0-4c9c-883f-95bea8eb4fd8 /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7362136707987132970,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-26686?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-04-26T19:30:43.972484915+08:00","start-ts":449349168355868674,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-20-g16f5d59f9","epoch":449349168382083077} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449349172288028686,"min-table-barrier-ts":449349191254409224,"admin-job-type":0} /tidb/cdc/default/default/task/position/748f6965-3fc0-4c9c-883f-95bea8eb4fd8/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7362136707987132970 {"id":7362136707987132970,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x check diff failed 1-th time, retry later check diff successfully check_ts_forward ddl-only-block-related-table =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_lost_callback/run.sh using Sink-Type: kafka... <<================= [Fri Apr 26 19:32:13 CST 2024] <<<<<< run test case mq_sink_lost_callback success! >>>>>> check diff failed 17-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/sql_mode Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... 
Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... changefeed is working normally rts: 449349191778697200->449349192040579100 checkpoint: 449349191778697200->449349192040579100 run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b5a538001d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:8224, start at 2024-04-26 19:32:14.841409048 +0800 CST m=+5.152521647 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:14.849 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:14.847 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:14.847 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b5a538001d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:8224, start at 2024-04-26 19:32:14.841409048 +0800 CST m=+5.152521647 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:14.849 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:14.847 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:14.847 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b5a6a40007 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:8304, start at 2024-04-26 19:32:14.89660449 +0800 CST m=+5.158044888 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:14.904 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:14.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:14.889 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:32:16 CST 2024] <<<<<< run test case ddl_only_block_related_table success! >>>>>> Verifying downstream PD is started... 
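The repeated "ERROR 2003 (HY000)" lines and the mysql.tidb variable dumps above are two halves of the same check: the cluster scripts keep querying the freshly started TiDB instance until the connection succeeds, then print its bootstrap and GC bookkeeping variables. A rough sketch of that verification loop, assuming the upstream TiDB listens on 127.0.0.1:4000 as in these tests (the helper names in the repo differ; this is only the shape of the check):

# Poll upstream TiDB until it accepts connections, then dump the GC bookkeeping variables.
while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
  # Connection refused (ERROR 2003) is expected while tidb-server is still starting.
  sleep 1
done
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'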
check diff failed 18-th time, retry later table ddl_attributes.attributes_t1_new not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9687.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 19-th time, retry later table ddl_attributes.attributes_t1_new not exists for 2-th check, retry later + set +x + tso='449349193117990913 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349193117990913 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Fri Apr 26 19:32:19 CST 2024] <<<<<< START cdc server in ddl_sequence case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.97259727.out server --log-file /tmp/tidb_cdc_test/ddl_sequence/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_sequence/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/mq_sink_dispatcher Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... table ddl_attributes.attributes_t1_new exists table ddl_attributes.finish_mark not exists for 1-th check, retry later check diff failed 20-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 {"id":"ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131140} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d3d88d1 ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 /tidb/cdc/default/default/upstream/7362137127926630943 {"id":7362137127926630943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 {"id":"ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131140} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d3d88d1 ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 /tidb/cdc/default/default/upstream/7362137127926630943 {"id":7362137127926630943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 {"id":"ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131140} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d3d88d1 
ebdc500e-9cea-45ef-a0ba-a2f1e2d8b310 /tidb/cdc/default/default/upstream/7362137127926630943 {"id":7362137127926630943,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9792.out cli changefeed create --start-ts=449349193117990913 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-32371?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' table ddl_attributes.finish_mark not exists for 2-th check, retry later check diff failed 21-th time, retry later Create changefeed successfully! ID: 5cd34466-2ff3-49cd-8562-1e32dc98cb18 Info: {"upstream_id":7362137127926630943,"namespace":"default","id":"5cd34466-2ff3-49cd-8562-1e32dc98cb18","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-32371?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-04-26T19:32:23.314828428+08:00","start_ts":449349193117990913,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349193117990913,"checkpoint_ts":449349193117990913,"checkpoint_time":"2024-04-26 19:32:18.298"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release <<< Run all test success >>> [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } + set +x [Fri Apr 26 19:32:24 CST 2024] <<<<<< START kafka consumer in ddl_sequence case >>>>>> [Pipeline] // container [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // withEnv [Pipeline] } table ddl_attributes.finish_mark not exists for 3-th check, retry later [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 22-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_attributes.finish_mark not exists for 4-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 23-th time, retry later [Fri Apr 26 19:32:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 1 [Fri Apr 26 19:32:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 2 [Fri Apr 26 19:32:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 3 table test.table1 not exists for 1-th check, retry later table test.table1 not exists for 2-th check, retry later table test.table1 exists table test.table2 exists table test.table3 exists check diff successfully table test.table10 not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_auto_stop/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table test.table10 exists table test.table20 exists check diff successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64d600016 Current GC worker leader UUID.
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29498, start at 2024-04-26 19:32:25.609003152 +0800 CST m=+5.255828013 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.619 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.610 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.610 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64d600016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29498, start at 2024-04-26 19:32:25.609003152 +0800 CST m=+5.255828013 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.619 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.610 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.610 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64e0c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29578, start at 2024-04-26 19:32:25.648301529 +0800 CST m=+5.240509956 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.655 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.653 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.653 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/sql_mode/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sql_mode/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_attributes.finish_mark not exists for 5-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 24-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/changefeed_auto_stop Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... + trap stop_tidb_cluster EXIT + run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 4000 + run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 3306 + cd /tmp/tidb_cdc_test/sql_mode ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.30982.out cli tso query --pd=http://127.0.0.1:2379 Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... 
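The cli tso query step above prints the current TSO followed by a coverage footer, and the trace on the following lines shows the script keeping only the first field and reusing it as --start-ts when the changefeed is created. Condensed into a standalone sketch (the sink URI and topic name here are placeholders, not the ones used by this job):

# Grab a start timestamp from PD via the TiCDC CLI and strip the coverage footer.
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo "$tso_output" | head -n1 | awk '{print $1}')

# Create a changefeed that begins replicating from that timestamp.
cdc cli changefeed create \
    --start-ts="$start_ts" \
    --sink-uri="kafka://127.0.0.1:9092/example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"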
table ddl_sequence.finish_mark not exists for 1-th check, retry later table ddl_attributes.finish_mark not exists for 6-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 25-th time, retry later + set +x + tso='449349196463734785 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349196463734785 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449349196463734785 + run_cdc_server --workdir /tmp/tidb_cdc_test/sql_mode --binary cdc.test [Fri Apr 26 19:32:32 CST 2024] <<<<<< START cdc server in sql_mode case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.3101931021.out server --log-file /tmp/tidb_cdc_test/sql_mode/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sql_mode/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_sequence.finish_mark not exists for 2-th check, retry later table ddl_attributes.finish_mark not exists for 7-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b69dd00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:12290, start at 2024-04-26 19:32:30.741988227 +0800 CST m=+5.249933708 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:30.750 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:30.757 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:30.757 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b69dd00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:12290, start at 2024-04-26 19:32:30.741988227 +0800 CST m=+5.249933708 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:30.750 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:30.757 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:30.757 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b69f440014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-52ncr-nbk6v, pid:12357, start at 2024-04-26 19:32:30.835765442 +0800 CST m=+5.292457902 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:30.842 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:30.801 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:30.801 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
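The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks scattered through this log are dumps of TiDB's mysql.tidb system table, which the harness reads while waiting for each TiDB instance to finish bootstrapping and elect a GC leader. The exact statement the script runs is not shown here; a query along these lines reproduces the same dump against the upstream TiDB on port 4000:

# Dump the bootstrap and GC-leader variables printed above.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, `COMMENT` FROM mysql.tidb;'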
Logging trace to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 26-th time, retry later [2024/04/26 19:32:30.195 +08:00] [INFO] [case.go:115] ["sync updatePKUK take: 11.495400395s"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_attributes.finish_mark not exists for 8-th check, retry later table ddl_sequence.finish_mark not exists for 3-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:35 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8cc558bb-a6e5-442e-bb22-c49697fa9bb6 {"id":"8cc558bb-a6e5-442e-bb22-c49697fa9bb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131152} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d6ae7da 8cc558bb-a6e5-442e-bb22-c49697fa9bb6 /tidb/cdc/default/default/upstream/7362137167071663217 {"id":7362137167071663217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8cc558bb-a6e5-442e-bb22-c49697fa9bb6 {"id":"8cc558bb-a6e5-442e-bb22-c49697fa9bb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131152} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d6ae7da 8cc558bb-a6e5-442e-bb22-c49697fa9bb6 /tidb/cdc/default/default/upstream/7362137167071663217 {"id":7362137167071663217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8cc558bb-a6e5-442e-bb22-c49697fa9bb6 {"id":"8cc558bb-a6e5-442e-bb22-c49697fa9bb6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131152} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d6ae7da 8cc558bb-a6e5-442e-bb22-c49697fa9bb6 /tidb/cdc/default/default/upstream/7362137167071663217 {"id":7362137167071663217,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449349196463734785 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.31073.out cli changefeed create --start-ts=449349196463734785 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 Create changefeed successfully! 
ID: test-1 Info: {"upstream_id":7362137167071663217,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-04-26T19:32:36.082110047+08:00","start_ts":449349196463734785,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349196463734785,"checkpoint_ts":449349196463734785,"checkpoint_time":"2024-04-26 19:32:31.061"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Fri Apr 26 19:32:35 CST 2024] <<<<<< START cdc server in mq_sink_dispatcher case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.1366513667.out server --log-file /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc.log --log-level info --data-dir /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 27-th time, retry later table ddl_attributes.finish_mark not exists for 9-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_sequence.finish_mark not exists for 4-th check, retry later + set +x + run_sql 'use test; create table t1(id bigint primary key, a text, b text as ((regexp_replace(a, '\''^[1-9]\d{9,29}$'\'', '\''aaaaa'\''))), c text); insert into t1 (id, a, c) values(1,123456, '\''ab\\\\c'\''); insert into t1 (id, a, c) values(2,1234567890123, '\''ab\\c'\'');' 127.0.0.1 4000 + '[' kafka == mysql ']' + stop_tidb_cluster check diff failed 28-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09dde6e5-79e4-453d-a828-648d25dae747 {"id":"09dde6e5-79e4-453d-a828-648d25dae747","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131156} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d7f00cc 09dde6e5-79e4-453d-a828-648d25dae747 /tidb/cdc/default/default/upstream/7362137192775127805 {"id":7362137192775127805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09dde6e5-79e4-453d-a828-648d25dae747 {"id":"09dde6e5-79e4-453d-a828-648d25dae747","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131156} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2d7f00cc 09dde6e5-79e4-453d-a828-648d25dae747 /tidb/cdc/default/default/upstream/7362137192775127805 {"id":7362137192775127805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/09dde6e5-79e4-453d-a828-648d25dae747 {"id":"09dde6e5-79e4-453d-a828-648d25dae747","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131156} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/owner/22318f1a2d7f00cc 09dde6e5-79e4-453d-a828-648d25dae747 /tidb/cdc/default/default/upstream/7362137192775127805 {"id":7362137192775127805,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table ddl_attributes.finish_mark not exists for 10-th check, retry later table ddl_sequence.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13747.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 29-th time, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Fri Apr 26 19:32:40 CST 2024] <<<<<< run test case ddl_sequence success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_attributes.finish_mark exists check diff successfully + set +x + tso='449349198887780355 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449349198887780355 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13781.out cli changefeed create --start-ts=449349198887780355 '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/changefeed.toml wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... [2024/04/26 19:32:41.144 +08:00] [INFO] [main.go:86] ["running ddl test: 0 createDropSchemaDDL"] [2024/04/26 19:32:41.362 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/04/26 19:32:41.365 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/04/26 19:32:41.552 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/04/26 19:32:41.554 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/04/26 19:32:41.554 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/04/26 19:32:41.555 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/04/26 19:32:41.746 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/04/26 19:32:41.752 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] Create changefeed successfully! 
ID: test Info: {"upstream_id":7362137192775127805,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-04-26T19:32:42.251499736+08:00","start_ts":449349198887780355,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["verify.t"],"partition":"index-value"},{"matcher":["dispatcher.index"],"partition":"index-value","index":"idx_a"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":449349198887780355,"checkpoint_ts":449349198887780355,"checkpoint_time":"2024-04-26 19:32:40.308"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b747100018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:10779, start at 2024-04-26 19:32:41.581586469 +0800 CST m=+5.126777916 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:41.588 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:41.591 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:41.591 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b747100018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:10779, start at 2024-04-26 19:32:41.581586469 +0800 CST m=+5.126777916 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:41.588 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:41.591 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:41.591 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b747c80015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-z94t0-92h56, pid:10865, start at 2024-04-26 19:32:41.627964194 +0800 CST m=+5.122505579 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:41.633 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:41.636 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:41.636 +0800 All versions after safe point can be accessed. (DO NOT EDIT) check diff failed 30-th time, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
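The dispatchers rules embedded in the changefeed Info JSON above (partition "index-value" for verify.t, plus an explicit index "idx_a" for dispatcher.index) come from the --config file passed to cli changefeed create. That conf/changefeed.toml is not reproduced in this log; a sketch of a config that would yield those two rules, written as a shell heredoc so it stays copy-pasteable, looks roughly like this:

# Hypothetical reconstruction of the dispatcher section behind the mq_sink_dispatcher changefeed.
cat > /tmp/changefeed.toml <<'EOF'
[sink]
dispatchers = [
    { matcher = ["verify.t"], partition = "index-value" },
    { matcher = ["dispatcher.index"], partition = "index-value", index = "idx_a" },
]
EOF

# cdc cli changefeed create -c test --config=/tmp/changefeed.toml \
#     --sink-uri="kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true"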
Logging trace to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/04/26 19:32:42.980 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/04/26 19:32:42.981 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/04/26 19:32:42.984 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/04/26 19:32:42.986 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [Fri Apr 26 19:32:42 CST 2024] <<<<<< run test case ddl_attributes success! >>>>>> [2024/04/26 19:32:43.172 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/04/26 19:32:43.174 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/04/26 19:32:43.367 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/04/26 19:32:43.368 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/04/26 19:32:43.369 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/04/26 19:32:43.372 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null }' + echo '{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null }' { "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null } ++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349198887780355, '"checkpoint_time":' '"2024-04-26' '19:32:40.308",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! 
normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349198887780355, '"checkpoint_time":' '"2024-04-26' '19:32:40.308",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check diff failed at last A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. 
cat: /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff/output/sync_diff.log: No such file or directory
[2024/04/26 19:32:44.588 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/04/26 19:32:44.598 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[2024/04/26 19:32:44.781 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[2024/04/26 19:32:44.783 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/04/26 19:32:44.807 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/04/26 19:32:44.809 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/04/26 19:32:44.979 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
[2024/04/26 19:32:45.024 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null }'
+ echo '{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null }'
{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349198887780355, "checkpoint_time": "2024-04-26 19:32:40.308", "error": null }
++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349198887780355, '"checkpoint_time":' '"2024-04-26' '19:32:40.308",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \f\a\i\l\e\d ]]
+ echo 'changefeed state normal does not equal to failed'
changefeed state normal does not equal to failed
+ exit 1
run task failed 1-th time, retry later
***************** properties *****************
"mysql.port"="4000"
"dotransactions"="false"
"workload"="core"
"readallfields"="true"
"mysql.db"="changefeed_auto_stop_1"
"readproportion"="0"
"requestdistribution"="uniform"
"mysql.host"="127.0.0.1"
"threadcount"="4"
"insertproportion"="0"
"mysql.user"="root"
"recordcount"="20"
"operationcount"="0"
"scanproportion"="0"
"updateproportion"="0"
**********************************************
Run finished, takes 9.085395ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3989.2, Avg(us): 1690, Min(us): 913, Max(us): 3941, 95th(us): 4000, 99th(us): 4000
***************** properties *****************
"dotransactions"="false"
"readproportion"="0"
"mysql.port"="4000"
"recordcount"="20"
"requestdistribution"="uniform"
"insertproportion"="0"
"workload"="core"
"readallfields"="true"
"mysql.db"="changefeed_auto_stop_2"
"mysql.host"="127.0.0.1"
"scanproportion"="0"
"threadcount"="4"
"operationcount"="0"
"mysql.user"="root"
"updateproportion"="0"
**********************************************
Run finished, takes 8.745972ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 4292.8, Avg(us): 1654, Min(us): 905, Max(us): 4037, 95th(us): 5000, 99th(us): 5000
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null }'
+ echo '{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null }'
{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null }
++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349199988785161, '"checkpoint_time":' '"2024-04-26' '19:32:44.508",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \f\a\i\l\e\d ]]
+ echo 'changefeed state normal does not equal to failed'
changefeed state normal does not equal to failed
+ exit 1
run task failed 2-th time, retry later
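The check_changefeed_state retries above query the changefeed with cdc cli and compare the state (and, once it flips to failed, the error message) with jq. A sketch of that check, reconstructed from the xtrace output, follows; the function body is an approximation of the test helper, and the real helper also takes a tls_dir argument that is omitted here.

    # Approximation of the helper traced above; reconstructed from the xtrace lines, not copied from the repo.
    check_changefeed_state() {
        local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
        local info state message
        info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
        echo "$info"
        state=$(echo "$info" | jq -r .state)
        if [[ ! "$state" == "$expected_state" ]]; then
            echo "changefeed state $state does not equal to $expected_state"
            exit 1
        fi
        message=$(echo "$info" | jq -r .error.message)
        if [[ ! "$message" =~ $error_msg ]]; then
            echo "error message $message does not match expected $error_msg"
            exit 1
        fi
    }

In the log, an outer retry loop ("run task failed N-th time, retry later") keeps invoking this check until the changefeed actually reaches the failed state with the ErrDispatcherFailed message.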
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   723  100   723    0     0   9015      0 --:--:-- --:--:-- --:--:--  9037
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-04-26 19:30:26.039","puller_resolved_ts":"2024-04-26 19:30:26.039","last_synced_ts":"2024-04-26 19:30:19.588","now_ts":"2024-04-26 19:32:36.000","info":"Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' \u003e '\''Resolved-Ts'\'' \u003e '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:30:26.039","puller_resolved_ts":"2024-04-26' '19:30:26.039","last_synced_ts":"2024-04-26' '19:30:19.588","now_ts":"2024-04-26' '19:32:36.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-04-26' '19:30:26.039","puller_resolved_ts":"2024-04-26' '19:30:26.039","last_synced_ts":"2024-04-26' '19:30:19.588","now_ts":"2024-04-26' '19:32:36.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq -r .info
+ info='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ target_message='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ '[' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' '!=' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' ']'
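The block above polls TiCDC's open API for the changefeed's synced status and checks both the boolean flag and the advisory message. A compact sketch of that check, reconstructed from the trace, follows; the failure messages and the truncated expected text are placeholders, not the harness's exact strings.

    # Reconstructed from the trace above: query the v2 synced-status API and verify the flag plus hint text.
    synced_status=$(curl -s -X GET "http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced")
    status=$(echo "$synced_status" | jq .synced)
    info=$(echo "$synced_status" | jq -r .info)
    # full expected message elided here for brevity; the log above shows the complete text
    target_message='Please check whether PD is online and TiKV Regions are all available. ...'
    if [ "$status" != false ]; then
        echo "synced should be false when PD is unavailable"    # placeholder message
        exit 1
    fi
    if [ "$info" != "$target_message" ]; then
        echo "synced-status info does not match the expected hint"    # placeholder message
        exit 1
    fi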
+ cleanup_process cdc.test
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 3-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ run_case_with_unavailable_tidb conf/changefeed-redo.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
+ mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
***************** properties *****************
"operationcount"="0"
"readallfields"="true"
"recordcount"="20"
"readproportion"="0"
"requestdistribution"="uniform"
"mysql.user"="root"
"scanproportion"="0"
"mysql.host"="127.0.0.1"
"insertproportion"="0"
"mysql.db"="changefeed_auto_stop_3"
"updateproportion"="0"
"threadcount"="4"
"dotransactions"="false"
"workload"="core"
"mysql.port"="4000"
**********************************************
Run finished, takes 9.412613ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3772.4, Avg(us): 1756, Min(us): 994, Max(us): 4023, 95th(us): 4000, 99th(us): 5000
***************** properties *****************
"readallfields"="true"
"mysql.port"="4000"
"threadcount"="4"
"requestdistribution"="uniform"
"dotransactions"="false"
"mysql.user"="root"
"insertproportion"="0"
"updateproportion"="0"
"recordcount"="20"
"operationcount"="0"
"scanproportion"="0"
"workload"="core"
"readproportion"="0"
"mysql.host"="127.0.0.1"
"mysql.db"="changefeed_auto_stop_4"
**********************************************
Run finished, takes 24.065738ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3701.2, Avg(us): 4722, Min(us): 1014, Max(us): 18657, 95th(us): 19000, 99th(us): 19000
[Fri Apr 26 19:32:49 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1246312465.out server --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 --pd http://127.0.0.1:2379
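The cdc.test server launch above is immediately followed by a readiness probe: the harness polls the capture's /debug/info endpoint until the "etcd info" marker appears, as the trace that follows shows (first a refused connection, later an HTTP 200). A sketch of that polling loop, reconstructed from the traced variables; the timeout message is a placeholder.

    # Readiness probe reconstructed from the trace: poll /debug/info until the capture reports etcd info.
    get_info_fail_msg='failed to get info:'
    etcd_info_msg='etcd info'
    curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info'
    for ((i = 0; i <= 50; i++)); do
        res=$($curl_status_cmd)
        # ready once the response carries the etcd info dump and no fetch error
        if ! echo "$res" | grep -q "$get_info_fail_msg" && echo "$res" | grep -q "$etcd_info_msg"; then
            break
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server did not become ready in time"    # placeholder message
            exit 1
        fi
        sleep 3
    done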
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info
* About to connect() to 127.0.0.1 port 8301 (#0)
* Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8301; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/sql_mode
The 1 times to try to start tidb cluster...
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": { "time": "2024-04-26T19:32:48.795622774+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }'
+ echo '{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": { "time": "2024-04-26T19:32:48.795622774+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }'
{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": { "time": "2024-04-26T19:32:48.795622774+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }
++ jq -r .state
++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449349199988785161, '"checkpoint_time":' '"2024-04-26' '19:32:44.508",' '"error":' '{' '"time":' '"2024-04-26T19:32:48.795622774+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}'
+ state=failed
+ [[ ! failed == \f\a\i\l\e\d ]]
++ jq -r .error.message
++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449349199988785161, '"checkpoint_time":' '"2024-04-26' '19:32:44.508",' '"error":' '{' '"time":' '"2024-04-26T19:32:48.795622774+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}'
+ message='[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a'
+ [[ ! [CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]]
[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14041.out cli changefeed update -c test '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/new_changefeed.toml --no-confirm Diff of changefeed config: {Type:update Path:[Config SyncPointInterval] From: To:0xc0039647a8} {Type:update Path:[Config SyncPointRetention] From: To:0xc0039647b8} {Type:update Path:[Config Sink DispatchRules 0 Matcher 0] From:verify.t To:dispatcher.index} {Type:delete Path:[Config Sink DispatchRules 1 Matcher 0] From:dispatcher.index To:} {Type:delete Path:[Config Sink DispatchRules 1 PartitionRule] From:index-value To:} {Type:delete Path:[Config Sink DispatchRules 1 IndexName] From:idx_a To:} {Type:update Path:[Config Consistent] From: To:0xc000e495e0} Update changefeed config successfully! ID: test Info: {"upstream_id":7362137192775127805,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-04-26T19:32:42.251499736+08:00","start_ts":449349198887780355,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","dispatchers":[{"matcher":["dispatcher.index"],"partition":"index-value"}],"encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"failed","error":{"addr":"127.0.0.1:8300","code":"CDC:ErrDispatcherFailed","message":"[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"},"creator_version":"v8.2.0-alpha-20-g16f5d59f9","resolved_ts":0,"checkpoint_ts":449349199988785161,"checkpoint_time":"2024-04-26 19:32:44.508"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:52 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:32:52 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true);github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1251812520.out server --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc2.log --log-level debug --data-dir 
/tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resourcecontrol/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/sql_mode Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... [2024/04/26 19:32:51.659 +08:00] [INFO] [main.go:86] ["running ddl test: 1 truncateDDL"] [2024/04/26 19:32:51.879 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/04/26 19:32:51.896 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/04/26 19:32:52.069 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/04/26 19:32:52.071 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/04/26 19:32:52.097 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/04/26 19:32:52.098 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/04/26 19:32:52.265 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/04/26 19:32:52.302 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] [2024/04/26 19:32:52.452 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/04/26 19:32:52.459 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/04/26 19:32:52.501 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] [2024/04/26 19:32:52.501 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/04/26 19:32:52.636 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] [2024/04/26 19:32:52.702 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/04/26 19:32:52.848 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/04/26 19:32:52.855 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/04/26 19:32:52.907 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/04/26 19:32:52.910 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/04/26 19:32:53.059 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] [2024/04/26 19:32:53.118 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/04/26 19:32:53.251 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] [2024/04/26 19:32:53.257 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/04/26 19:32:53.311 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/04/26 19:32:53.314 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/04/26 19:32:53.455 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] [2024/04/26 19:32:53.500 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/04/26 19:32:53.656 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] [2024/04/26 19:32:53.666 +08:00] [INFO] [main.go:234] ["0 delete 
success: 500"] chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14075.out cli changefeed resume -c test [2024/04/26 19:32:53.688 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/04/26 19:32:53.691 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] [2024/04/26 19:32:53.860 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [2024/04/26 19:32:53.878 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/04/26 19:32:54.091 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/04/26 19:32:54.094 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/04/26 19:32:54.096 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] [2024/04/26 19:32:54.106 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [2024/04/26 19:32:54.285 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/04/26 19:32:54.311 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] PASS Verifying downstream PD is started... [2024/04/26 19:32:54.473 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/04/26 19:32:54.476 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] [2024/04/26 19:32:54.503 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/04/26 19:32:54.512 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/04/26 19:32:54.656 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"] coverage: 2.1% of statements in github.com/pingcap/tiflow/... [2024/04/26 19:32:54.701 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/04/26 19:32:54.840 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/04/26 19:32:54.846 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/04/26 19:32:54.886 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/04/26 19:32:54.899 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] [2024/04/26 19:32:55.029 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] [2024/04/26 19:32:55.075 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1735/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } start tidb cluster in /tmp/tidb_cdc_test/resourcecontrol Starting Upstream PD... 
[Pipeline] // dir [2024/04/26 19:32:55.214 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/04/26 19:32:55.221 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] [2024/04/26 19:32:55.264 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/04/26 19:32:55.278 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/04/26 19:32:55.405 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] [Pipeline] } [Pipeline] // withCredentials [Pipeline] } Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Starting Downstream PD... Release Version: v8.2.0-alpha-10-g50c80407c Edition: Community Git Commit Hash: 50c80407cd57e96a85452dd1601fcb41c1f263cf Git Branch: master UTC Build Time: 2024-04-26 02:17:39 Verifying upstream PD is started... [Pipeline] // timeout [2024/04/26 19:32:55.453 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] [2024/04/26 19:32:55.595 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"] [2024/04/26 19:32:55.602 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"] [2024/04/26 19:32:55.642 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"] [2024/04/26 19:32:55.655 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"] [Pipeline] } [Pipeline] // stage [Pipeline] } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) > GET /debug/info HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Fri, 26 Apr 2024 11:32:55 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d391278-4a60-466a-9a44-135a0ac4cafc {"id":"9d391278-4a60-466a-9a44-135a0ac4cafc","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131172} /tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be7c 9d391278-4a60-466a-9a44-135a0ac4cafc /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d391278-4a60-466a-9a44-135a0ac4cafc {"id":"9d391278-4a60-466a-9a44-135a0ac4cafc","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131172} 
/tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be7c 9d391278-4a60-466a-9a44-135a0ac4cafc /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/9d391278-4a60-466a-9a44-135a0ac4cafc {"id":"9d391278-4a60-466a-9a44-135a0ac4cafc","address":"127.0.0.1:8302","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131172} /tidb/cdc/default/__cdc_meta__/capture/c8e10157-d3ba-4754-8da6-767dcfbb6abf {"id":"c8e10157-d3ba-4754-8da6-767dcfbb6abf","address":"127.0.0.1:8301","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131169} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be49 c8e10157-d3ba-4754-8da6-767dcfbb6abf /tidb/cdc/default/__cdc_meta__/owner/22318f1a2da8be7c 9d391278-4a60-466a-9a44-135a0ac4cafc /tidb/cdc/default/default/upstream/7362137245957577435 {"id":7362137245957577435,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Fri Apr 26 19:32:55 CST 2024] <<<<<< START kafka consumer in changefeed_auto_stop case >>>>>> check_changefeed_state http://127.0.0.1:2379 113ef91c-b3be-4348-b3fd-4ed00e070f64 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=113ef91c-b3be-4348-b3fd-4ed00e070f64 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 113ef91c-b3be-4348-b3fd-4ed00e070f64 -s + info='{ "upstream_id": 7362137245957577435, "namespace": "default", "id": "113ef91c-b3be-4348-b3fd-4ed00e070f64", "state": "normal", "checkpoint_tso": 449349200062709761, "checkpoint_time": "2024-04-26 19:32:44.790", "error": null }' + echo '{ "upstream_id": 7362137245957577435, "namespace": "default", "id": "113ef91c-b3be-4348-b3fd-4ed00e070f64", "state": "normal", "checkpoint_tso": 449349200062709761, "checkpoint_time": "2024-04-26 19:32:44.790", "error": null }' { "upstream_id": 7362137245957577435, "namespace": "default", "id": "113ef91c-b3be-4348-b3fd-4ed00e070f64", "state": "normal", "checkpoint_tso": 449349200062709761, "checkpoint_time": "2024-04-26 19:32:44.790", "error": null } ++ echo '{' '"upstream_id":' 7362137245957577435, '"namespace":' '"default",' '"id":' '"113ef91c-b3be-4348-b3fd-4ed00e070f64",' '"state":' '"normal",' '"checkpoint_tso":' 449349200062709761, '"checkpoint_time":' '"2024-04-26' 
'19:32:44.790",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362137245957577435, '"namespace":' '"default",' '"id":' '"113ef91c-b3be-4348-b3fd-4ed00e070f64",' '"state":' '"normal",' '"checkpoint_tso":' 449349200062709761, '"checkpoint_time":' '"2024-04-26' '19:32:44.790",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully table changefeed_auto_stop_1.usertable not exists for 1-th check, retry later [Pipeline] // container [Pipeline] } [2024/04/26 19:32:55.792 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"] [2024/04/26 19:32:55.837 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"] Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // withEnv + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null }' + echo '{ "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null }' { "upstream_id": 7362137192775127805, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449349199988785161, "checkpoint_time": "2024-04-26 19:32:44.508", "error": null } ++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349199988785161, '"checkpoint_time":' '"2024-04-26' '19:32:44.508",' '"error":' null '}' ++ jq -r .state [Pipeline] } [2024/04/26 19:32:55.994 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"] [2024/04/26 19:32:56.001 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"] [2024/04/26 19:32:56.037 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"] [2024/04/26 19:32:56.053 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"] [2024/04/26 19:32:56.182 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"] [Pipeline] // node [Pipeline] } + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7362137192775127805, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449349199988785161, '"checkpoint_time":' '"2024-04-26' '19:32:44.508",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [2024/04/26 19:32:56.228 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"] [2024/04/26 19:32:56.372 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"] [2024/04/26 19:32:56.382 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"] [2024/04/26 19:32:56.428 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"] [2024/04/26 19:32:56.446 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"] [Pipeline] // stage [Pipeline] } [2024/04/26 19:32:56.568 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"] [2024/04/26 19:32:56.623 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"] table test.finish_mark not exists for 1-th check, retry later Verifying downstream PD is started... Verifying downstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_auto_stop_1.usertable not exists for 2-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark exists check diff successfully cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Fri Apr 26 19:32:59 CST 2024] <<<<<< run test case mq_sink_dispatcher success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
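The check_changefeed_state steps above query the changefeed with 'cdc cli changefeed query -s' and validate both .state and .error.message via jq before printing 'run task successfully'. A condensed sketch of that check, assuming the same CLI output shape; the _sketch suffix marks the helper as illustrative rather than the canonical test-suite function:

# Verify a changefeed reached the expected state and its error message matches
# the expected pattern ("null" when no error is expected). Sketch only.
check_changefeed_state_sketch() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=${4:-null}
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "expected state ${expected_state}, got ${state}" >&2
        return 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "expected error matching ${expected_error}, got ${message}" >&2
        return 1
    fi
    echo "run task successfully"
}

# usage: check_changefeed_state_sketch http://127.0.0.1:2379 test failed ErrDispatcherFailed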
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 2118288ceb3f52a4a8431acba4fc2c560def0732 Git Commit Branch: master UTC Build Time: 2024-04-25 21:05:46 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table changefeed_auto_stop_1.usertable exists table changefeed_auto_stop_2.usertable not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:32:57.141 +08:00] [WARN] [diff.go:182] ["table struct is not equal"] [reason="column num not equal, one is 5 another is 4"] [Fri Apr 26 19:32:59 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 10 [Fri Apr 26 19:32:59 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 20 [Fri Apr 26 19:32:59 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: finish table test.finish not exists for 1-th check, retry later table test.finish not exists for 2-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-51-g944fff519c Edition: Community Git Commit Hash: 944fff519c90039747affb94067439ff9541f2fb Git Branch: master UTC Build Time: 2024-04-26 09:54:14 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table changefeed_auto_stop_2.usertable exists table changefeed_auto_stop_3.usertable not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh: line 1: 12282 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" 
--config="$CUR/conf/changefeed.toml" 2>&1 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] } [Pipeline] // dir Post stage [Pipeline] sh [2024/04/26 19:33:02.568 +08:00] [INFO] [main.go:86] ["running ddl test: 2 addDropColumnDDL"] [2024/04/26 19:33:02.785 +08:00] [INFO] [main.go:220] ["0 insert success: 100"] [2024/04/26 19:33:02.789 +08:00] [INFO] [main.go:220] ["1 insert success: 100"] [2024/04/26 19:33:02.983 +08:00] [INFO] [main.go:234] ["0 delete success: 100"] [2024/04/26 19:33:02.984 +08:00] [INFO] [main.go:220] ["0 insert success: 200"] [2024/04/26 19:33:02.994 +08:00] [INFO] [main.go:220] ["1 insert success: 200"] [2024/04/26 19:33:02.996 +08:00] [INFO] [main.go:234] ["1 delete success: 100"] [2024/04/26 19:33:03.187 +08:00] [INFO] [main.go:220] ["0 insert success: 300"] [2024/04/26 19:33:03.192 +08:00] [INFO] [main.go:220] ["1 insert success: 300"] + ls /tmp/tidb_cdc_test/ cov.kafka_simple_basic.32283230.out cov.kafka_simple_basic.cli.3284.out cov.kafka_simple_basic.cli.3401.out cov.kafka_simple_basic.cli.3436.out cov.kafka_simple_basic_avro.62296231.out cov.kafka_simple_basic_avro.cli.6290.out cov.kafka_simple_handle_key_only.91149116.out cov.kafka_simple_handle_key_only.cli.9174.out cov.kafka_simple_handle_key_only.cli.9210.out cov.kafka_simple_handle_key_only.cli.9262.out cov.kafka_simple_handle_key_only.cli.9297.out cov.kafka_simple_handle_key_only.cli.9334.out cov.kafka_simple_handle_key_only_avro.cli.12075.out cov.kafka_simple_handle_key_only_avro.cli.12115.out cov.kafka_simple_handle_key_only_avro.cli.12165.out cov.kafka_simple_handle_key_only_avro.cli.12198.out cov.kafka_simple_handle_key_only_avro.cli.12236.out kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro simple_handle_key_only simple_handle_key_only_avro sql_res.kafka_simple_basic.txt sql_res.kafka_simple_basic_avro.txt sql_res.kafka_simple_handle_key_only.txt sql_res.kafka_simple_handle_key_only_avro.txt ++ find /tmp/tidb_cdc_test/ -type f -name '*.log' + tar -cvzf log-G01.tar.gz /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0002/000002.log 
/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1.log /tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_basic/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv3.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/sync_diff_inspector.log 
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/pd1.log tar: Removing leading `/' from member names /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv_down.log [2024/04/26 19:33:03.384 +08:00] [INFO] [main.go:234] ["0 delete success: 200"] [2024/04/26 19:33:03.385 +08:00] [INFO] [main.go:220] ["1 insert success: 400"] [2024/04/26 19:33:03.385 +08:00] [INFO] [main.go:220] ["0 insert success: 400"] [2024/04/26 19:33:03.387 +08:00] [INFO] [main.go:234] ["1 delete success: 200"] /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv1.log table test.finish exists check diff successfully [2024/04/26 19:33:03.577 +08:00] [INFO] [main.go:220] ["1 insert success: 500"] [2024/04/26 19:33:03.582 +08:00] [INFO] [main.go:220] ["0 insert success: 500"] /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tikv3.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/pd1.log wait process cdc.test exit for 1-th time... 
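The post stage above gathers artifacts by listing /tmp/tidb_cdc_test/, finding every *.log beneath it and packing them into log-G01.tar.gz for archiving. A rough sketch of that collection step, assuming the same workdir layout; the group name G01 is simply the value seen in this run and the helper name is made up:

# Pack all integration-test logs for one test group into a single archive.
collect_logs() {
    local group=${1:-G01}
    local log_dir=/tmp/tidb_cdc_test
    ls "$log_dir"/
    # every test case writes its logs under its own subdirectory
    local logs
    logs=$(find "$log_dir"/ -type f -name '*.log')
    # tar strips the leading '/' from member names, as the output above notes
    tar -cvzf "log-${group}.tar.gz" $logs
    ls -alh "log-${group}.tar.gz"
}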
[2024/04/26 19:33:03.770 +08:00] [INFO] [main.go:220] ["1 insert success: 600"] [2024/04/26 19:33:03.772 +08:00] [INFO] [main.go:234] ["1 delete success: 300"] [2024/04/26 19:33:03.783 +08:00] [INFO] [main.go:234] ["0 delete success: 300"] [2024/04/26 19:33:03.783 +08:00] [INFO] [main.go:220] ["0 insert success: 600"] [2024/04/26 19:33:03.964 +08:00] [INFO] [main.go:220] ["1 insert success: 700"] [2024/04/26 19:33:03.980 +08:00] [INFO] [main.go:220] ["0 insert success: 700"] table changefeed_auto_stop_3.usertable exists table changefeed_auto_stop_4.usertable not exists for 1-th check, retry later /tmp/tidb_cdc_test/simple_handle_key_only_avro/output/sync_diff.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv_down.log VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b891ec0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:16548, start at 2024-04-26 19:33:02.761626137 +0800 CST m=+5.109712927 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:35:02.768 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:33:02.765 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:23:02.765 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b891ec0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:16548, start at 2024-04-26 19:33:02.761626137 +0800 CST m=+5.109712927 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:35:02.768 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:33:02.765 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:23:02.765 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. 
Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b893580014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-7lxf9-61xc1, pid:16631, start at 2024-04-26 19:33:02.827933877 +0800 CST m=+5.127015522 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:35:02.834 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:33:02.806 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:23:02.806 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-13-g37701038f Edition: Community Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e Git Branch: HEAD UTC Build Time: 2024-04-26 10:25:26 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-26 10:30:15 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64d600016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29498, start at 2024-04-26 19:32:25.609003152 +0800 CST m=+5.255828013 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.619 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 60m All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.610 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.610 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64d600016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29498, start at 2024-04-26 19:32:25.609003152 +0800 CST m=+5.255828013 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.619 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 60m All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.610 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.610 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b64e0c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-3glp8-ht8w0, pid:29578, start at 2024-04-26 19:32:25.648301529 +0800 CST m=+5.240509956 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:34:25.655 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 60m All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:32:25.653 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240426-19:22:25.653 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'normal'@'%' start tidb cluster failed The 2 times to try to start tidb cluster... [2024/04/26 19:33:04.173 +08:00] [INFO] [main.go:220] ["1 insert success: 800"] [2024/04/26 19:33:04.176 +08:00] [INFO] [main.go:234] ["1 delete success: 400"] [2024/04/26 19:33:04.184 +08:00] [INFO] [main.go:234] ["0 delete success: 400"] [2024/04/26 19:33:04.185 +08:00] [INFO] [main.go:220] ["0 insert success: 800"] /tmp/tidb_cdc_test/kafka_simple_basic/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv2.log /tmp/tidb_cdc_test/kafka_simple_basic/tikv1.log cdc.test: no process found wait process cdc.test exit for 2-th time... process cdc.test already exit [Fri Apr 26 19:33:04 CST 2024] <<<<<< run test case multi_topics_v2 success! >>>>>> [2024/04/26 19:33:04.370 +08:00] [INFO] [main.go:220] ["1 insert success: 900"] [2024/04/26 19:33:04.379 +08:00] [INFO] [main.go:220] ["0 insert success: 900"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/04/26 19:33:04.576 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"] [2024/04/26 19:33:04.578 +08:00] [INFO] [main.go:234] ["1 delete success: 500"] [2024/04/26 19:33:04.586 +08:00] [INFO] [main.go:234] ["0 delete success: 500"] [2024/04/26 19:33:04.587 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"] /tmp/tidb_cdc_test/kafka_simple_basic/tikv3.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log [2024/04/26 19:33:04.761 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"] [2024/04/26 19:33:04.776 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"] [2024/04/26 19:33:04.953 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"] [2024/04/26 19:33:04.955 +08:00] [INFO] [main.go:234] ["1 delete success: 600"] [2024/04/26 19:33:04.968 +08:00] [INFO] [main.go:234] ["0 delete success: 600"] [2024/04/26 19:33:04.969 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"] /tmp/tidb_cdc_test/kafka_simple_basic/pd1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv_down.log [2024/04/26 19:33:05.150 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"] [2024/04/26 19:33:05.169 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"] /tmp/tidb_cdc_test/kafka_simple_basic_avro/down_pd.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv2.log [2024/04/26 19:33:05.359 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"] [2024/04/26 19:33:05.361 +08:00] [INFO] [main.go:234] ["0 delete success: 700"] [2024/04/26 19:33:05.362 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"] [2024/04/26 19:33:05.362 +08:00] [INFO] [main.go:234] ["1 delete success: 700"] /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv1.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tikv3.log [2024/04/26 19:33:05.562 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"] [2024/04/26 19:33:05.575 +08:00] 
[INFO] [main.go:220] ["1 insert success: 1500"] /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb-slow.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/tidb_down.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/stdout.log /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log table changefeed_auto_stop_4.usertable exists + cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.18016.out cli tso query --pd=http://127.0.0.1:2379 [2024/04/26 19:33:05.762 +08:00] [INFO] [main.go:234] ["0 delete success: 800"] [2024/04/26 19:33:05.763 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"] [2024/04/26 19:33:05.777 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"] [2024/04/26 19:33:05.779 +08:00] [INFO] [main.go:234] ["1 delete success: 800"] [2024/04/26 19:33:05.968 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"] [2024/04/26 19:33:05.985 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"] /tmp/tidb_cdc_test/kafka_simple_basic_avro/pd1.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv_down.log check diff failed 1-th time, retry later [2024/04/26 19:33:06.172 +08:00] [INFO] [main.go:234] ["0 delete success: 900"] [2024/04/26 19:33:06.173 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"] [2024/04/26 19:33:06.190 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"] [2024/04/26 19:33:06.192 +08:00] [INFO] [main.go:234] ["1 delete success: 900"] /tmp/tidb_cdc_test/kafka_simple_handle_key_only/down_pd.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv2.log /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv1.log [2024/04/26 19:33:06.365 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"] [2024/04/26 19:33:06.389 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63c68b8b964000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:11057, start at 2024-04-26 19:33:05.254314385 +0800 CST m=+5.189999217 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240426-19:35:05.263 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240426-19:33:05.241 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240426-19:23:05.241 +0800 All versions after safe point can be accessed. 
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63c68b8b964000f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:11057, start at 2024-04-26 19:33:05.254314385 +0800 CST m=+5.189999217	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240426-19:35:05.263 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240426-19:33:05.241 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240426-19:23:05.241 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63c68b8b8840011	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1735-h8q8j-cjcsw, pid:11144, start at 2024-04-26 19:33:05.202487541 +0800 CST m=+5.085346176	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240426-19:35:05.209 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240426-19:33:05.185 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240426-19:23:05.185 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-13-g37701038f
Edition: Community
Git Commit Hash: 37701038fbd30f5eafac11c3ad180f14c6dcab9e
Git Branch: HEAD
UTC Build Time: 2024-04-26 10:25:26
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile: RELWITHDEBINFO
Compiler: clang++ 13.0.0
Raft Proxy
Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time: 2024-04-26 10:30:15
Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine: tiflash
Prometheus Prefix: tiflash_proxy_
Profile: release
Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
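The variable dumps above come from TiDB's mysql.tidb system table, which the harness prints while verifying that the upstream and downstream instances have bootstrapped. A minimal sketch of the same check, assuming a TiDB instance at 127.0.0.1:4000 reachable as root without a password (endpoint and credentials are illustrative, not taken from this log):

# Print the GC-related bootstrap variables (tikv_gc_safe_point, tikv_gc_life_time, ...).
mysql -h 127.0.0.1 -P 4000 -u root -e \
  "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"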
Logging trace to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["37701038fbd30f5eafac11c3ad180f14c6dcab9e"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-13-g37701038f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tikv3.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb-slow.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tidb_down.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/sync_diff_inspector.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/stdout.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log
/tmp/tidb_cdc_test/kafka_simple_handle_key_only/pd1.log
+ ls -alh log-G01.tar.gz
-rw-r--r-- 1 jenkins jenkins 18M Apr 26 19:33 log-G01.tar.gz
[Pipeline] archiveArtifacts
Archiving artifacts
[2024/04/26 19:33:06.565 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
[2024/04/26 19:33:06.566 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
[2024/04/26 19:33:06.607 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
[2024/04/26 19:33:06.609 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
[2024/04/26 19:33:06.765 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
[2024/04/26 19:33:06.804 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"]
[2024/04/26 19:33:06.951 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
[2024/04/26 19:33:06.951 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
[2024/04/26 19:33:06.995 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"]
[2024/04/26 19:33:06.998 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"]
[2024/04/26 19:33:07.136 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"]
[2024/04/26 19:33:07.179 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"]
[2024/04/26 19:33:07.313 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"]
[2024/04/26 19:33:07.314 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"]
[2024/04/26 19:33:07.382 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"]
[2024/04/26 19:33:07.384 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"]
[2024/04/26 19:33:07.496 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"]
+ set +x
+ tso='449349205692252161 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
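The log-G01.tar.gz archived above bundles the per-case logs so Jenkins can attach them to the build. A minimal sketch of producing such an archive, assuming GNU tar and that everything of interest sits under /tmp/tidb_cdc_test (the exact include list used by the real script is not shown in this log):

# Bundle all test logs into a single artifact and show its size before archiving.
find /tmp/tidb_cdc_test -type f -name '*.log' -print0 \
  | tar --null -czf log-G01.tar.gz -T -
ls -alh log-G01.tar.gz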
+ echo 449349205692252161 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449349205692252161
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test
[Fri Apr 26 19:33:07 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1805518057.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
Recording fingerprints
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12471.out cli tso query --pd=http://127.0.0.1:2379
[Pipeline] }
[2024/04/26 19:33:07.829 +08:00] [WARN] [diff.go:551] ["checksum is not equal"] [table=`test`.`ntest`] [where="((TRUE) AND TRUE)"] ["source checksum"=0] ["target checksum"=4235625502] ["get source checksum cost"=1.719938ms] ["get target checksum cost"=1.367529ms]
[Pipeline] // container
check diff successfully
wait process cdc.test exit for 1-th time...
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G01'
wait process cdc.test exit for 2-th time...
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
wait process cdc.test exit for 3-th time...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
+ set +x
+ tso='449349206328213505 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
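The sync_diff "checksum is not equal" warning above is transient here (the same branch reports "check diff successfully" shortly after a retry), but a quick manual cross-check can be useful when it persists. A minimal sketch, assuming an upstream TiDB at 127.0.0.1:4000 and a downstream database at 127.0.0.1:3306 as used elsewhere in this log (the test.ntest table name comes from the warning; credentials and the upstream port are illustrative):

# Compare row counts of the diffed table on both ends as a basic sanity check.
mysql -h 127.0.0.1 -P 4000 -u root -e "SELECT COUNT(*) AS upstream_rows FROM test.ntest;"
mysql -h 127.0.0.1 -P 3306 -u root -e "SELECT COUNT(*) AS downstream_rows FROM test.ntest;"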
+ echo 449349206328213505 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Fri Apr 26 19:33:10 CST 2024] <<<<<< START cdc server in resourcecontrol case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.1250612508.out server --log-file /tmp/tidb_cdc_test/resourcecontrol/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resourcecontrol/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
script returned exit code 143
{"level":"warn","ts":1714131190.964738,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00215ec40/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: read tcp 127.0.0.1:45132->127.0.0.1:2379: read: connection reset by peer"}
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
> GET /debug/info HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
>
< HTTP/1.1 200 OK
< Date: Fri, 26 Apr 2024 11:33:10 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
<
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e61be523-8bda-4edb-bff3-2e8b6d2298f6 {"id":"e61be523-8bda-4edb-bff3-2e8b6d2298f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131188} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2dfe94d0 e61be523-8bda-4edb-bff3-2e8b6d2298f6 /tidb/cdc/default/default/upstream/7362137330202697008 {"id":7362137330202697008,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e61be523-8bda-4edb-bff3-2e8b6d2298f6 {"id":"e61be523-8bda-4edb-bff3-2e8b6d2298f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131188} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2dfe94d0 e61be523-8bda-4edb-bff3-2e8b6d2298f6 /tidb/cdc/default/default/upstream/7362137330202697008 {"id":7362137330202697008,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e61be523-8bda-4edb-bff3-2e8b6d2298f6 {"id":"e61be523-8bda-4edb-bff3-2e8b6d2298f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-20-g16f5d59f9","git-hash":"16f5d59f936001f6d7031387873b3c668f3c5ae6","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714131188} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f1a2dfe94d0 e61be523-8bda-4edb-bff3-2e8b6d2298f6 /tidb/cdc/default/default/upstream/7362137330202697008 {"id":7362137330202697008,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
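The curl polling traced above is how the harness decides the cdc server is up: it keeps hitting /debug/info until the response contains "etcd info" (meaning the capture registered itself in PD's etcd), giving up after 50 attempts. A minimal standalone sketch of that loop, assuming the server listens on 127.0.0.1:8300 as in this run:

# Poll the cdc server's debug endpoint until it reports its etcd registration.
for i in $(seq 0 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info 2>&1 || true)
  if echo "$res" | grep -q 'failed to get info:'; then
    echo "cdc server returned an error" >&2
    exit 1
  fi
  if echo "$res" | grep -q 'etcd info'; then
    echo "cdc server is ready"
    break
  fi
  if [ "$i" -eq 50 ]; then
    echo "timed out waiting for cdc server" >&2
    exit 1
  fi
  sleep 3
done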
+ set +x
+ config_path=conf/changefeed-redo.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449349205692252161 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.18120.out cli changefeed create --start-ts=449349205692252161 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
++ stop_tidb_cluster
[2024/04/26 19:33:11.236 +08:00] [ERROR] [request.go:310] ["failed to send a http request"] [error="Post \"http://127.0.0.1:8300/api/v2/changefeeds\": context canceled"]
Error: Post "http://127.0.0.1:8300/api/v2/changefeeds": context canceled
warning: GOCOVERDIR not set, no coverage data emitted
{"level":"warn","ts":1714131191.2388408,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00177a8c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
++ stop_tidb_cluster
script returned exit code 143
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
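For reference, the changefeed the branch was creating above (before the run was interrupted and the API call was canceled) combines the start-ts obtained earlier with a MySQL-protocol sink and a redo-enabled TOML config. A minimal sketch of the same CLI call, assuming a `cdc` binary with the flags shown in the trace (--start-ts, --sink-uri, --changefeed-id, --config) and the test's own conf/changefeed-redo.toml relative to the case directory:

# Create a changefeed replicating into a MySQL sink, starting from $start_ts.
SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
cdc cli changefeed create \
  --start-ts="${start_ts}" \
  --sink-uri="${SINK_URI}" \
  --changefeed-id="test-1" \
  --config=conf/changefeed-redo.toml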
[Pipeline] // dir
++ stop_tidb_cluster
script returned exit code 143
Cache not saved (inner-step execution failed)
++ stop_tidb_cluster
script returned exit code 143
[ repeated [Pipeline] } / [Pipeline] // cache, dir, withCredentials, timeout, stage, container, withEnv, node, podTemplate block-closing markers for the remaining parallel branches trimmed ]
Failed in branch Matrix - TEST_GROUP = 'G00'
Failed in branch Matrix - TEST_GROUP = 'G02'
Failed in branch Matrix - TEST_GROUP = 'G04'
Failed in branch Matrix - TEST_GROUP = 'G05'
Failed in branch Matrix - TEST_GROUP = 'G06'
Failed in branch Matrix - TEST_GROUP = 'G07'
Failed in branch Matrix - TEST_GROUP = 'G08'
Failed in branch Matrix - TEST_GROUP = 'G10'
Failed in branch Matrix - TEST_GROUP = 'G09'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 1
Finished: FAILURE