Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 Still waiting to schedule task β€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9’ is offline Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-2jbh3-419cb --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "52aa30d7d2d84231723116b10b280bff0cd5734f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-2jbh3" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: 
cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git 
fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 5 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=2c6c1dae-5159-44dc-90d9-882412b5b31b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test 
TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-2jbh3 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305 NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-2jbh3 pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build3973578436=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1843-2jbh3-9vqp9 bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 63992 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: fc3af52ae49152ba (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 
19 IN A 20.205.243.166 ;; Query time: 1 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Sat May 04 10:31:10 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/model/kv.go, cdc/model/sink.go, cdc/model/sink_test.go, cdc/processor/processor.go, cdc/processor/sinkmanager/manager.go, cdc/processor/sourcemanager/manager.go, cdc/redo/reader/reader.go, cdc/sink/dmlsink/factory/factory.go, cdc/sink/dmlsink/txn/mysql/dml.go, cdc/sink/dmlsink/txn/mysql/mysql.go, cdc/sink/dmlsink/txn/mysql/mysql_test.go, cmd/kafka-consumer/main.go, cmd/pulsar-consumer/main.go, cmd/storage-consumer/main.go, errors.toml, pkg/applier/redo.go, pkg/applier/redo_test.go, pkg/errors/cdc_errors.go, pkg/errors/helper.go, pkg/sink/codec/open/open_protocol_decoder.go, pkg/sink/codec/open/open_protocol_message.go, tests/integration_tests/_utils/check_sync_diff, tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml, tests/integration_tests/changefeed_dup_error_restart/conf/workload, tests/integration_tests/changefeed_dup_error_restart/run.sh, tests/integration_tests/force_replicate_table/run.sh, tests/integration_tests/open_protocol_handle_key_only/data/data.sql, tests/integration_tests/open_protocol_handle_key_only/run.sh, tests/integration_tests/run_group.sh] [Pipeline] echo diff file not matched: cdc/model/kv.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-be15534-5bf93c6) 203863040 bytes in 1.32 secs (154114364 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository 
in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/
.git
HEAD is now at 5bf93c6ca f
POST git-upload-pack (656 bytes)
From https://github.com/pingcap/tiflow
 = [up to date] master -> origin/master
 = [up to date] refs/pull/10919/head -> origin/pr/10919/head
Previous HEAD position was 5bf93c6ca f
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
🚧 Checking out base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224...
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
✅ Checked. 🎉
🧾 HEAD info:
be1553484fe4c03594eabb8d7435c694e5fd7224
be1553484 codec(ticdc): avro simplify the unit test (#11010)
2a7a65c6f Support Sequences (#10203)
36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005)
🚧 Pre-merge heads of pull requests to base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ...
Updating be1553484..5bf93c6ca
Fast-forward
 cdc/model/kv.go                                     |   5 +
 cdc/model/sink.go                                   |  38 ++-
 cdc/model/sink_test.go                              |   9 +-
 cdc/processor/processor.go                          |  21 +-
 cdc/processor/sinkmanager/manager.go                |   5 +
 cdc/processor/sourcemanager/manager.go              |  66 +++-
 cdc/redo/reader/reader.go                           |  21 +-
 cdc/sink/dmlsink/factory/factory.go                 |   8 +-
 cdc/sink/dmlsink/txn/mysql/dml.go                   |  12 +
 cdc/sink/dmlsink/txn/mysql/mysql.go                 |  89 +++---
 cdc/sink/dmlsink/txn/mysql/mysql_test.go            |   2 +-
 cmd/kafka-consumer/main.go                          |   4 +-
 cmd/pulsar-consumer/main.go                         |  17 +-
 cmd/storage-consumer/main.go                        |   4 +-
 errors.toml                                         |   5 +
 pkg/applier/redo.go                                 | 303 +++++++++++++++++-
 pkg/applier/redo_test.go                            | 347 ++++++++++++++++++++-
 pkg/errors/cdc_errors.go                            |   4 +
 pkg/errors/helper.go                                |  19 ++
 pkg/sink/codec/open/open_protocol_decoder.go        |   1 +
 pkg/sink/codec/open/open_protocol_message.go        |   3 +
 tests/integration_tests/_utils/check_sync_diff      |   2 +-
 .../conf/diff_config.toml                           |  29 ++
 .../changefeed_dup_error_restart/conf/workload      |  13 +
 .../changefeed_dup_error_restart/run.sh             |  54 ++++
 .../integration_tests/force_replicate_table/run.sh  |   4 +-
 .../open_protocol_handle_key_only/data/data.sql     |   2 +-
 .../open_protocol_handle_key_only/run.sh            |   4 +-
 tests/integration_tests/run_group.sh                |   5 +-
 29 files changed, 991 insertions(+), 105 deletions(-)
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/workload
 create mode 100755 tests/integration_tests/changefeed_dup_error_restart/run.sh
🧾 Pre-merged result:
5bf93c6caedff315c4c9650d80e951e31bc88a3d
5bf93c6ca f
fe53bc9df f
ecb3fedc8 f
✅ Pre merged 🎉
✅ ~~~~~All done.~~~~~~
[Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache not saved (git/pingcap/tiflow/rev-be15534-5bf93c6 already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh
+ cd ../tiflow
+ ./scripts/download-integration-test-binaries.sh master
Download binaries...
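The download-integration-test-binaries.sh invocation above resolves, for each component, the commit SHA currently published for the requested branch (the four small 41-byte curl responses in the output below) and then pulls the matching prebuilt tarball from FILE_SERVER_URL. A minimal shell sketch of that flow for tidb-server follows; the refs/.../sha1 lookup path is an assumption inferred from those 41-byte responses, while the builds/... download URL is copied from the log.

  #!/usr/bin/env bash
  # Sketch only: mirrors the per-component download done by
  # download-integration-test-binaries.sh, using tidb-server as the example.
  set -euo pipefail

  FILE_SERVER_URL="http://fileserver.pingcap.net"
  branch="master"
  mkdir -p tmp third_bin

  # ASSUMPTION: the fileserver publishes the latest commit SHA for a branch
  # under download/refs/...; a 40-char SHA plus newline matches the 41-byte
  # responses seen in this log.
  tidb_sha=$(curl -sfL "${FILE_SERVER_URL}/download/refs/pingcap/tidb/${branch}/sha1")

  # The builds/... URL format is taken verbatim from the log output below.
  wget -nv "${FILE_SERVER_URL}/download/builds/pingcap/tidb/${tidb_sha}/centos7/tidb-server.tar.gz" \
    -O tmp/tidb-server.tar.gz
  tar -xzf tmp/tidb-server.tar.gz -C third_bin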
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1056 0 --:--:-- --:--:-- --:--:-- 1051 100 41 100 41 0 0 1054 0 --:--:-- --:--:-- --:--:-- 1051 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 65 0 --:--:-- --:--:-- --:--:-- 65 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1685 0 --:--:-- --:--:-- --:--:-- 1708 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 317 0 --:--:-- --:--:-- --:--:-- 317 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz 2024-05-04 18:31:30 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz [536570515/536570515] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz 2024-05-04 18:31:42 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz 2024-05-04 18:32:00 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz 2024-05-04 18:32:18 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz [456057803/456057803] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-05-04 18:32:23 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-05-04 18:32:24 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-05-04 18:32:24 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-05-04 18:32:25 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-05-04 18:32:27 
URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz [79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-05-04 18:32:35 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 18:32 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 18:32 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 18:32 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 18:32 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 
1 jenkins jenkins 2.0M Apr 30 16:11 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start + cd - /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 18:32 . drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 18:32 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 18:32 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 18:32 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 
1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 8.0K drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 18:32 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 18:32 .. 
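Because the tiflow-cdc binary cache was not restored (the ls above shows an empty ./bin), the job now builds the CDC binaries from source. The make cdc output that follows stamps version information into the binary at link time via -ldflags '-X ...'; a reduced sketch of that mechanism is below, with the package path and variable names copied from the log and the remaining values illustrative (the real target also injects ReleaseVersion, GitBranch, and GoVersion).

  #!/usr/bin/env bash
  # Sketch: how the version stamping in the `make cdc` output below works.
  # -X overwrites string variables in github.com/pingcap/tiflow/pkg/version
  # at link time; `cdc version` later prints the injected values.
  set -euo pipefail

  cd tiflow

  commit=$(git rev-parse HEAD)
  buildts=$(date -u '+%Y-%m-%d %H:%M:%S')
  CGO_ENABLED=0 go build -trimpath \
    -ldflags "-X 'github.com/pingcap/tiflow/pkg/version.GitHash=${commit}' -X 'github.com/pingcap/tiflow/pkg/version.BuildTS=${buildts}'" \
    -o bin/cdc ./cmd/cdc

  ./bin/cdc version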
+ '[' -f ./bin/cdc ']' + make cdc CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:32:38" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc ./cmd/cdc go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/IBM/sarama v1.41.2 go: downloading go.uber.org/zap v1.27.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 go: downloading github.com/swaggo/gin-swagger v1.2.0 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/DATA-DOG/go-sqlmock v1.5.0 go: downloading github.com/imdario/mergo v0.3.16 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/goccy/go-json v0.10.2 go: downloading 
github.com/stretchr/testify v1.9.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/hashicorp/golang-lru v0.5.1 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/YangKeao/seahash v0.0.0-20240229041150-e7bf269c3140 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/r3labs/diff v1.1.0 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/segmentio/kafka-go v0.4.41-0.20230526171612-f057b1d369cd go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/swaggo/swag v1.16.3 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 go: downloading github.com/golang/mock v1.6.0 go: downloading github.com/pingcap/tidb-dashboard v0.0.0-20240326110213-9768844ff5d7 go: downloading github.com/uber-go/atomic v1.4.0 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/aws/smithy-go v1.13.5 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: 
downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading github.com/xdg-go/scram v1.1.2 go: downloading github.com/KyleBanks/depth v1.2.1 go: downloading github.com/go-openapi/spec v0.21.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading golang.org/x/text v0.14.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading gorm.io/driver/mysql v1.3.3 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/joomcode/errorx v1.0.1 go: downloading github.com/glebarez/sqlite v1.7.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: 
downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/aws/aws-sdk-go-v2/config v1.18.30 go: downloading github.com/aws/aws-sdk-go-v2/credentials v1.13.29 go: downloading github.com/aws/aws-sdk-go-v2/service/glue v1.58.1 go: downloading github.com/jarcoal/httpmock v1.2.0 go: downloading github.com/mailru/easyjson v0.7.7 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/xdg-go/pbkdf2 v1.0.0 go: downloading github.com/xdg-go/stringprep v1.0.4 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/shopspring/decimal v1.3.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/blacktear23/go-proxyprotocol v1.0.6 go: downloading github.com/pingcap/fn v1.0.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/glebarez/go-sqlite v1.21.2 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/go-ozzo/ozzo-validation/v4 v4.3.0 go: downloading github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2 go: downloading github.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.6 go: downloading github.com/aws/aws-sdk-go-v2/internal/ini v1.3.37 go: downloading github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.14 go: downloading github.com/aws/aws-sdk-go-v2/service/sso v1.12.14 go: 
downloading github.com/aws/aws-sdk-go-v2/service/sts v1.20.1 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-openapi/jsonpointer v0.21.0 go: downloading github.com/go-openapi/jsonreference v0.21.0 go: downloading github.com/go-openapi/swag v0.23.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.36 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading modernc.org/libc v1.37.1 go: downloading modernc.org/sqlite v1.27.0 go: downloading github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.30 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.30 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading k8s.io/api v0.28.6 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/josharian/intern v1.0.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading 
github.com/golang/glog v1.2.0 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading modernc.org/memory v1.7.2 go: downloading modernc.org/mathutil v1.6.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + '[' -f ./bin/cdc_kafka_consumer ']' + make kafka_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:38" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go + '[' -f ./bin/cdc_storage_consumer ']' + make storage_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:45" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go + '[' -f ./bin/cdc.test ']' + make integration_test_build cd tools/check && GO111MODULE=on go build -mod=mod -o ../bin/failpoint-ctl github.com/pingcap/failpoint/failpoint-ctl go: downloading github.com/pingcap/failpoint v0.0.0-20210316064728-7acb0f0a3dfd go: downloading github.com/sergi/go-diff v1.1.0 CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X 
"github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc_pulsar_consumer ./cmd/pulsar-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/oauth2-server ./cmd/oauth2-server/main.go go: downloading github.com/go-oauth2/oauth2/v4 v4.5.2 go: downloading github.com/tidwall/buntdb v1.3.0 go: downloading github.com/tidwall/rtred v0.1.2 go: downloading github.com/tidwall/grect v0.1.4 go: downloading github.com/tidwall/gjson v1.14.3 go: downloading github.com/tidwall/match v1.1.1 go: downloading github.com/tidwall/tinyqueue v0.1.1 go: downloading github.com/tidwall/pretty v1.2.0 $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl enable >/dev/null) go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/PingCAP-QE/go-sqlsmith v0.0.0-20231213065948-336e064b488d go: downloading github.com/chzyer/readline v1.5.1 go: downloading github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 go: downloading github.com/deepmap/oapi-codegen v1.9.0 go: downloading github.com/gogo/gateway v1.1.0 go: downloading github.com/getkin/kin-openapi v0.80.0 go: downloading github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954 go: downloading github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0 go: downloading github.com/mattn/go-shellwords v1.0.12 go: downloading go.uber.org/dig v1.13.0 go: downloading go.uber.org/ratelimit v0.2.0 go: downloading github.com/VividCortex/mysqlerr v1.0.0 go: downloading github.com/ngaut/log v0.0.0-20210830112240-0124ec040aeb go: downloading github.com/bradleyjkemp/grpc-tools v0.2.5 go: downloading go.uber.org/goleak v1.3.0 go: downloading github.com/integralist/go-findroot v0.0.0-20160518114804-ac90681525dc go: downloading upper.io/db.v3 v3.7.1+incompatible go: downloading github.com/jmoiron/sqlx v1.3.3 go: downloading github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 go: downloading github.com/improbable-eng/grpc-web v0.12.0 go: downloading github.com/ghodss/yaml v1.0.0 go: downloading github.com/rs/cors v1.7.0 go: downloading github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -c -cover -covermode=atomic \ -coverpkg=github.com/pingcap/tiflow/... \ -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 10:34:52" -X "github.com/pingcap/tiflow/pkg/version.GitHash=5bf93c6caedff315c4c9650d80e951e31bc88a3d" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-86-g5bf93c6ca"' -o bin/cdc ./cmd/cdc/main.go \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null) + ls -alh ./bin total 1.2G drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 18:39 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 18:32 .. -rwxr-xr-x. 1 jenkins jenkins 220M May 4 18:39 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 18:39 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 18:35 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 18:35 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 18:35 cdc_storage_consumer -rwxr-xr-x. 1 jenkins jenkins 12M May 4 18:35 oauth2-server + ./bin/cdc version Release Version: v8.2.0-alpha-86-g5bf93c6ca Git Commit Hash: 5bf93c6caedff315c4c9650d80e951e31bc88a3d Git Branch: HEAD UTC Build Time: 2024-05-04 10:34:52 Go Version: go version go1.21.0 linux/amd64 Failpoint Build: true [Pipeline] } Cache saved successfully (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-5bf93c6) 1191771136 bytes in 17.77 secs (67058753 bytes/sec) [Pipeline] // cache [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/ + ls -alh ./bin total 3.0G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 18:39 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 18:32 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 18:39 bin -rwxr-xr-x. 1 jenkins jenkins 220M May 4 18:39 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 18:39 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 18:35 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 18:35 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 18:35 cdc_storage_consumer drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 18:39 etc -rwxr-xr-x. 1 jenkins jenkins 17M May 4 18:39 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 18:39 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 18:39 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 18:39 lib lrwxrwxrwx. 1 jenkins jenkins 13 May 4 18:39 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K May 4 18:39 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 May 4 18:39 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K May 4 18:39 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 May 4 18:39 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 
1 jenkins jenkins 15 May 4 18:39 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M May 4 18:39 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M May 4 18:39 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M May 4 18:39 minio -rwxr-xr-x. 1 jenkins jenkins 12M May 4 18:35 oauth2-server -rwxr-xr-x. 1 jenkins jenkins 37M May 4 18:39 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M May 4 18:39 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M May 4 18:39 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M May 4 18:39 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M May 4 18:39 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M May 4 18:39 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M May 4 18:39 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M May 4 18:39 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 18:39 share -rwxr-xr-x. 1 jenkins jenkins 32M May 4 18:39 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M May 4 18:39 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 4 18:39 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M May 4 18:39 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M May 4 18:39 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M May 4 18:39 xprog [Pipeline] } Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 90.27 secs (41290157 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Tests) [Pipeline] parallel [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G00') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G01') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G02') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G03') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G04') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G05') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G06') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G07') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G08') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G09') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G10') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G11') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G12') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G13') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G14') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G15') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G16') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G17') [Pipeline] withEnv [Pipeline] { 
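The build steps above follow one pattern throughout: enable failpoints with tools/bin/failpoint-ctl, compile each binary with -ldflags '-X ...' so that the release version, git hash, branch, build timestamp and Go version are stamped into github.com/pingcap/tiflow/pkg/version (which is exactly what ./bin/cdc version prints back), then disable the failpoints again even if a build fails. A condensed sketch of that pattern, assuming a tiflow checkout with tools/bin/failpoint-ctl already built; the variable names and the trap-based cleanup are simplifications for illustration, not a copy of the Makefile logic:

# sketch: version stamping + failpoint enable/build/disable, as in the log above
set -euo pipefail
RELEASE_VERSION=$(git describe --tags --dirty --always)
GIT_HASH=$(git rev-parse HEAD)
GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
BUILD_TS=$(date -u '+%Y-%m-%d %H:%M:%S')
GO_VERSION=$(go version)
LDFLAGS="-X \"github.com/pingcap/tiflow/pkg/version.ReleaseVersion=${RELEASE_VERSION}\" \
  -X \"github.com/pingcap/tiflow/pkg/version.BuildTS=${BUILD_TS}\" \
  -X \"github.com/pingcap/tiflow/pkg/version.GitHash=${GIT_HASH}\" \
  -X \"github.com/pingcap/tiflow/pkg/version.GitBranch=${GIT_BRANCH}\" \
  -X \"github.com/pingcap/tiflow/pkg/version.GoVersion=${GO_VERSION}\""

# turn import paths into repo-relative package dirs, as the log's for-loop does
PKGS=$(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin' \
  | sed 's|^github.com/pingcap/tiflow/||' | grep -v 'github.com/pingcap/tiflow')
echo $PKGS | xargs tools/bin/failpoint-ctl enable
trap 'echo $PKGS | xargs tools/bin/failpoint-ctl disable' EXIT

# coverage-instrumented test binary plus the plain cdc binary
CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags "$LDFLAGS" \
  -c -cover -covermode=atomic -coverpkg=github.com/pingcap/tiflow/... \
  -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc
CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags "$LDFLAGS" -o bin/cdc ./cmd/cdc/main.go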
[Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vgp2b-mj7jd --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "15e22346e9b8acb4c5accaf55eacb236dfa69682" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vgp2b" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: 
"11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: 
"jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1954fc77; decorates RemoteLauncher[hudson.remoting.Channel@4a0e338d:JNLP4-connect connection from 10.233.84.170/10.233.84.170:58312] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-92hlc-d321k --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "3bbb218e0052db00aff580f364e323f64e58eda4" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-92hlc" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: 
false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-lcj6v-sk3p2 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "3d38884cb4a7bb17e0957f85c0f1e5d59277a3ef" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-lcj6v" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" 
requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: 
"my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-9prpt-33lvs --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "45ed3a015e468ae572ae13fc63bb95bdfbcb2934" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-9prpt" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: 
"localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5" - name: "JENKINS_AGENT_WORKDIR" value: 
"/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-rfvt7-rqdl0 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0b27ffc2f55bb414db7b9732ee52fbbf8dffe33f" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-rfvt7" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: 
"4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 7.51 secs (496258527 bytes/sec) [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate 
[Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] { [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] { [Pipeline] sh [Pipeline] { [Pipeline] checkout Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] { The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] checkout The recommended git tool is: git [Pipeline] podTemplate [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 [Pipeline] } Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz [Pipeline] // timeout [Pipeline] } [Pipeline] // container [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@559b7460; decorates RemoteLauncher[hudson.remoting.Channel@6c7b97f1:JNLP4-connect connection from 10.233.105.229/10.233.105.229:52170] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@56f97bc8; decorates RemoteLauncher[hudson.remoting.Channel@128c640d:JNLP4-connect connection from 10.233.86.234/10.233.86.234:42230] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git 
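The readiness checks interleaved above boil down to three probes: nc -z against ZooKeeper on 2181, nc -z against Kafka on 9092, and the ZooKeeper four-letter "dump" command to confirm that broker 1 has registered under /brokers/ids. The one-shot checks in the log assume the services are already up; a retrying variant of the same probes (the 60-second bound is an arbitrary choice) would look like:

# sketch: poll until zookeeper, kafka, and the broker registration are all ready
wait_port() {
  local port=$1
  for _ in $(seq 1 60); do
    nc -z localhost "$port" && return 0
    sleep 1
  done
  echo "timed out waiting for port $port" >&2
  return 1
}
wait_port 2181
wait_port 9092
echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1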
[Pipeline] podTemplate [Pipeline] { [Pipeline] node Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G00 Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=5b4973aa-4db3-42f7-8af1-c353cfc5774a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vgp2b GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vgp2b GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:41:55 CST 2024] <<<<<< run test case bdr_mode success! 
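The G00 worker shown here wipes /tmp/tidb_cdc_test and then hands its case list (bdr_mode, capture_suicide_while_balance_table, syncpoint, ...) to tests/integration_tests/run_group.sh, passing the sink type and group name as positional arguments. Re-running one group outside CI, assuming bin/ already contains cdc, cdc.test and the downloaded third-party binaries, is essentially the same three commands:

# sketch: drive one Kafka-sink integration test group the way this stage does
rm -rf /tmp/tidb_cdc_test && mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka G00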
>>>>>> No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7c37678b; decorates RemoteLauncher[hudson.remoting.Channel@39311b86:JNLP4-connect connection from 10.233.108.164/10.233.108.164:54902] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@37c5afc2; decorates RemoteLauncher[hudson.remoting.Channel@316ecbde:JNLP4-connect connection from 10.233.72.15/10.233.72.15:39644] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] withEnv [Pipeline] { Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] container > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 [Pipeline] { [Pipeline] withEnv Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) [Pipeline] { Still waiting to schedule task Waiting for next available executor on β€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p’ [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] stage [Pipeline] { (Test) [Pipeline] 
Avoid second fetch
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
> git rev-parse origin/main^{commit} # timeout=10
> git config core.sparsecheckout # timeout=10
> git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<=================
[Sat May 4 18:41:58 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! >>>>>>
> git rev-parse origin/main^{commit} # timeout=10
> git config core.sparsecheckout # timeout=10
> git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
> git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
> git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
> git rev-parse origin/main^{commit} # timeout=10
> git config core.sparsecheckout # timeout=10
> git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
> git rev-parse origin/main^{commit} # timeout=10
> git config core.sparsecheckout # timeout=10
> git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-bsrn3-dcd7f
[pod spec omitted: like every test agent in this build, it declares the zookeeper (wurstmeister/zookeeper), golang (hub.pingcap.net/jenkins/golang-tini:1.21), kafka (wurstmeister/kafka:2.12-2.4.1 with PLAINTEXT/SSL listeners on 127.0.0.1:9092/9093 and keystore/truststore fetched via RACK_COMMAND), canal-adapter (rustinliu/ticdc-canal-json-adapter:latest), net-tool, report (hub.pingcap.net/jenkins/python3-requests:latest), mysql (quay.io/debezium/example-mysql:2.4), connect (quay.io/debezium/connect:2.4) and jnlp containers with the same resource requests/limits and the volume-0 and workspace-volume emptyDir mounts; only the agent name and jenkins/label-digest differ between agents]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-hwpd5-w1fsw [pod spec omitted: identical to the template described above, apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<=================
kafka downstream isn't support syncpoint record
[Sat May 4 18:42:01 CST 2024] <<<<<< run test case syncpoint success! >>>>>>
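The syncpoint case is reported as a success here because it is skipped, not executed: the kafka sink cannot verify syncpoint records. Test cases that do not apply to a given sink type typically short-circuit at the top of their run.sh; a minimal sketch of such a guard (the SINK_TYPE variable and exact wording are assumptions for illustration, not quoted from the actual script):
    #!/bin/bash
    # hypothetical guard: bail out early when the sink under test is kafka,
    # echoing the same skip message that appears in this log
    if [ "$SINK_TYPE" = "kafka" ]; then
        echo "kafka downstream isn't support syncpoint record"
        exit 0
    fi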
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-kz8q3-1ft1j [pod spec omitted: identical to the other agents' template, apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-0ps0r-19f1x [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vzvzj-cc6dg [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-shhrm-9ldgp [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dnxvj-lhxc7 [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-xgxq5-vjm7k [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dqmt8-m1cdr [pod spec omitted: identical apart from the agent name and label digest]
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<=================
[Sat May 4 18:42:04 CST 2024] <<<<<< run test case hang_sink_suicide success!
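Each "Running test ... using Sink-Type: kafka" block above corresponds to one integration test case driven by its own run.sh under tests/integration_tests/. A minimal sketch of invoking a single case by hand from this build's tiflow checkout (how the harness passes the sink type is an assumption; the actual driver may differ):
    cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
    # run one case against the kafka sink; the sink type is supplied both as an
    # environment variable and as the first argument, since which one the case
    # script reads is assumed here rather than taken from this log
    SINK_TYPE=kafka bash tests/integration_tests/hang_sink_suicide/run.sh kafka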
>>>>>> Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-jrfkf-w0zj0 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1f84f258ee6e91162b88e213ed9b2b7252fccd05" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-jrfkf" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: 
"workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-zthb7-6hvh0 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "a8d88fc0d7914d82e354daaea593be5f212b4bda" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-zthb7" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - 
args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" 
name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 7.74 secs (481790909 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] sh find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:42:07 CST 2024] <<<<<< run test case server_config_compatibility success! 
>>>>>> [Pipeline] cache Still waiting to schedule task β€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf’ is offline Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-nnjsj-p5dkp --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "1c2588bbade35448d1652d95dacb51b29c05db97" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-nnjsj" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" 
memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Still waiting to schedule task β€˜pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh’ is offline Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1843-cn5n8-fndtp --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "a9ec2d2faa7130ca9f8d5a7e0ac83cf05dca8814" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1843-cn5n8" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" 
imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_dup_error_restart/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:42:10 CST 2024] <<<<<< run test case changefeed_dup_error_restart success! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages Starting Upstream PD... 
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 6.78 secs (550041193 bytes/sec) [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] stage The recommended git tool is: git [Pipeline] { (Test) [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@46c03ded; decorates RemoteLauncher[hudson.remoting.Channel@7e23dd34:JNLP4-connect connection from 10.233.123.177/10.233.123.177:54978] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@def5d5f; decorates RemoteLauncher[hudson.remoting.Channel@376e719c:JNLP4-connect connection from 10.233.127.62/10.233.127.62:40636] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@12dfe323; decorates RemoteLauncher[hudson.remoting.Channel@5a18fdda:JNLP4-connect connection from 10.233.107.25/10.233.107.25:56000] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository 
https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@f05553d; decorates RemoteLauncher[hudson.remoting.Channel@4ec789f:JNLP4-connect connection from 10.233.66.234/10.233.66.234:38468] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@c9768c4; decorates RemoteLauncher[hudson.remoting.Channel@1aaf32bc:JNLP4-connect connection from 10.233.88.78/10.233.88.78:52832] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1ce3ee1e; decorates RemoteLauncher[hudson.remoting.Channel@17f7a4ef:JNLP4-connect connection from 10.233.67.30/10.233.67.30:45802] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7761b0ee; decorates RemoteLauncher[hudson.remoting.Channel@6f3a73fb:JNLP4-connect connection from 10.233.126.233/10.233.126.233:33292] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@46f2f99e; decorates RemoteLauncher[hudson.remoting.Channel@58f1277c:JNLP4-connect connection from 10.233.70.252/10.233.70.252:44040] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1090f2e; decorates RemoteLauncher[hudson.remoting.Channel@90f21e0:JNLP4-connect connection from 10.233.69.148/10.233.69.148:58960] will be ignored (a typical 
symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@58d601d8; decorates RemoteLauncher[hudson.remoting.Channel@1b9f00ce:JNLP4-connect connection from 10.233.68.109/10.233.68.109:44572] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1ea70cd5; decorates RemoteLauncher[hudson.remoting.Channel@513cd885:JNLP4-connect connection from 10.233.71.151/10.233.71.151:45290] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git 
+refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Starting Upstream TiDB... 
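The "Verifying upstream PD is started..." / "Verifying downstream PD is started..." lines interleaved above poll PD before TiKV and TiDB are launched. A minimal sketch of such a probe against the PD client address used throughout this log (127.0.0.1:2379); the /pd/api/v1/health path is an assumption about PD's HTTP API, not taken from the harness:

# Sketch only: wait until PD answers on its client port before starting TiKV/TiDB.
PD_ADDR=127.0.0.1:2379
for i in $(seq 1 30); do
    # any endpoint that returns 200 once PD is serving would do here
    if curl -sf "http://$PD_ADDR/pd/api/v1/health" >/dev/null; then
        echo "PD is up"
        break
    fi
    sleep 1
done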
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add 
remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccadac40013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:1804, start at 2024-05-04 18:42:28.176901984 +0800 CST m=+5.231933126 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:28.185 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:28.145 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:28.145 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccadac40013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:1804, start at 2024-05-04 18:42:28.176901984 +0800 CST m=+5.231933126 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:28.185 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:28.145 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:28.145 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccadc40000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:1875, start at 2024-05-04 18:42:28.251626733 +0800 CST m=+5.240348835 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:28.259 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:28.240 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:28.240 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
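The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tidb_server_version, tikv_gc_leader_uuid, tikv_gc_life_time, ...) come from TiDB's mysql.tidb bookkeeping table, which the harness reads to confirm each instance has bootstrapped and GC is configured. A sketch of reproducing the same dump by hand; the port is an assumption (TiDB's default 4000), since the listen ports are not printed at this point in the log:

# Sketch only: dump the same bootstrap/GC variables the harness checks.
mysql -h 127.0.0.1 -P 4000 -u root \
    -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'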
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 18:42:33 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.33633365.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:42:36 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c67ada8e-401b-4471-b5e5-c5c5d30b0915 {"id":"c67ada8e-401b-4471-b5e5-c5c5d30b0915","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819353} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43328c7bcd c67ada8e-401b-4471-b5e5-c5c5d30b0915 /tidb/cdc/default/default/upstream/7365092981209419546 {"id":7365092981209419546,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c67ada8e-401b-4471-b5e5-c5c5d30b0915 {"id":"c67ada8e-401b-4471-b5e5-c5c5d30b0915","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819353} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43328c7bcd c67ada8e-401b-4471-b5e5-c5c5d30b0915 /tidb/cdc/default/default/upstream/7365092981209419546 {"id":7365092981209419546,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/c67ada8e-401b-4471-b5e5-c5c5d30b0915 {"id":"c67ada8e-401b-4471-b5e5-c5c5d30b0915","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819353} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43328c7bcd c67ada8e-401b-4471-b5e5-c5c5d30b0915 /tidb/cdc/default/default/upstream/7365092981209419546 {"id":7365092981209419546,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: ddf66e3c-eb32-4bfb-b31a-53a98474030f Info: {"upstream_id":7365092981209419546,"namespace":"default","id":"ddf66e3c-eb32-4bfb-b31a-53a98474030f","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T18:42:36.666760836+08:00","start_ts":449529604563271681,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529604563271681,"checkpoint_ts":449529604563271681,"checkpoint_time":"2024-05-04 18:42:33.345"} [Sat May 4 18:42:36 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>> Starting generate kafka big messages... 
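The changefeed above targets sink_uri kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912 (the \u0026 escapes in the JSON are just &) through the cdc server probed earlier on 127.0.0.1:8300. A sketch of the equivalent cdc cli invocation; the exact flag set is an assumption, and older cdc builds expect --pd instead of --server:

# Sketch only: create the same changefeed from the command line.
cdc cli changefeed create \
    --server=http://127.0.0.1:8300 \
    --sink-uri="kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912"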
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 16.51 secs (225785261 bytes/sec) [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { The recommended git tool is: git [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] // container [Pipeline] // timeout [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5c6b994a; decorates RemoteLauncher[hudson.remoting.Channel@221ef20e:JNLP4-connect connection from 10.233.100.221/10.233.100.221:55662] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@791363ae; decorates RemoteLauncher[hudson.remoting.Channel@3ca6114:JNLP4-connect connection from 10.233.106.134/10.233.106.134:48938] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] } [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=208f760e-3deb-41ad-bba3-ce57c08ded6c BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-lcj6v GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-lcj6v pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch 
GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/row_format Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
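The broker-readiness probe traced above waits for the ZooKeeper and Kafka ports with nc, then confirms the broker registered itself by sending ZooKeeper's `dump` four-letter command and looking for the ephemeral /brokers/ids/1 node. A minimal standalone sketch of the same check follows; host, ports and broker id are taken from the trace, while the retry bound and sleep interval are assumptions (the log shows only single probes):

    #!/usr/bin/env bash
    # Sketch of the Kafka readiness probe seen in the trace above.
    # Assumptions: ZooKeeper on localhost:2181, Kafka on localhost:9092, broker id 1;
    # the 30-attempt retry budget is illustrative, not taken from the log.
    set -eu

    wait_for_port() {
      local name=$1 port=$2
      echo "Waiting for ${name} to be ready..."
      for _ in $(seq 1 30); do
        nc -z localhost "$port" && return 0
        sleep 1
      done
      echo "${name} did not open port ${port}" >&2
      return 1
    }

    wait_for_port zookeeper 2181
    wait_for_port kafka 9092

    echo "Waiting for kafka-broker to be ready..."
    # ZooKeeper's 'dump' four-letter command lists ephemeral nodes; a registered
    # broker shows up as /brokers/ids/<id>.
    for _ in $(seq 1 30); do
      if echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' \
          | grep -F -w -q /brokers/ids/1; then
        echo "broker 1 registered"
        exit 0
      fi
      sleep 1
    done
    echo "broker never registered in ZooKeeper" >&2
    exit 1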
table kafka_big_messages.test exists check diff failed 1-th time, retry later check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully wait process cdc.test exit for 1-th time... Starting Upstream TiDB... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:42:49 CST 2024] <<<<<< run test case kafka_big_messages success! >>>>>> Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
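The recurring "check diff failed N-th time, retry later", "table X not exists for N-th check" and "wait process cdc.test exit" messages come from the harness polling a condition until it holds or a retry budget runs out. A generic sketch of that pattern is below; the helper name, retry budget and interval are illustrative, not the real utilities in tests/integration_tests/_utils:

    #!/usr/bin/env bash
    # Illustrative retry helper; retry_until, the 60-attempt budget and the 2s
    # interval are assumptions, not the names/values used by the real test utils.
    retry_until() {
      local what=$1; shift
      local max=60 i
      for ((i = 1; i <= max; i++)); do
        if "$@"; then
          echo "$what succeeded"
          return 0
        fi
        echo "$what failed $i-th time, retry later"
        sleep 2
      done
      echo "$what still failing after $max attempts" >&2
      return 1
    }

    # Hypothetical usages mirroring the log messages:
    #   retry_until "check diff" sync_diff_inspector --config=./diff_config.toml
    #   retry_until "wait process cdc.test exit" bash -c '! pgrep -x cdc.test >/dev/null'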
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 8.45 secs (441062765 bytes/sec) [Pipeline] { [Pipeline] // container [Pipeline] sh [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G02 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=1fbb3a0c-e80d-4ce6-9aaf-3f7215d548f7 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-92hlc GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z pingcap_tiflow_pull_cdc_integration_kafka_test_1843-92hlc GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:42:52 CST 2024] <<<<<< run test case consistent_replicate_ddl success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] // timeout [Pipeline] } [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccc81f00010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:1413, start at 2024-05-04 18:42:55.243538753 +0800 CST m=+5.035906223 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:55.250 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:55.228 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:55.228 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccc81f00010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:1413, start at 2024-05-04 18:42:55.243538753 +0800 CST m=+5.035906223 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:55.250 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:55.228 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:55.228 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccc83800017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:1501, start at 2024-05-04 18:42:55.37462194 +0800 CST m=+5.105710845 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:44:55.383 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:42:55.377 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:32:55.377 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
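The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above (bootstrapped, tidb_server_version, tikv_gc_*) are the contents of TiDB's mysql.tidb table, which is a convenient way to confirm a freshly started server has finished bootstrapping. A sketch of such a startup check follows; the port, user and retry budget are assumptions, since the log only shows the resulting table:

    #!/usr/bin/env bash
    # Poll a freshly started TiDB until it accepts connections and reports itself
    # bootstrapped. Host/port/user and the retry budget are illustrative.
    host=127.0.0.1 port=4000 user=root

    for i in $(seq 1 60); do
      if out=$(mysql -h "$host" -P "$port" -u "$user" --batch --skip-column-names \
            -e "SELECT VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME = 'bootstrapped'" 2>/dev/null) \
          && [ "$out" = "True" ]; then
        # Dump the whole table, which is what produces the listing seen in the log.
        mysql -h "$host" -P "$port" -u "$user" -e "SELECT * FROM mysql.tidb"
        exit 0
      fi
      echo "TiDB not ready yet ($i-th check), retry later"
      sleep 1
    done
    echo "TiDB did not become ready" >&2
    exit 1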
Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC432382124855 < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 10:42:57 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 18:42:58 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>> Exiting on signal: INTERRUPT + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2843.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449529611256594433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529611256594433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 18:43:00 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28802882.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:43:01 CST 2024] <<<<<< run test case consistent_replicate_nfs success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/kafka_compression Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:43:05 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/765cadcd-3ab8-4d24-bf69-9a2ed3d24959 {"id":"765cadcd-3ab8-4d24-bf69-9a2ed3d24959","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4332eedcd3 765cadcd-3ab8-4d24-bf69-9a2ed3d24959 /tidb/cdc/default/default/upstream/7365093093418089165 {"id":7365093093418089165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/765cadcd-3ab8-4d24-bf69-9a2ed3d24959 {"id":"765cadcd-3ab8-4d24-bf69-9a2ed3d24959","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4332eedcd3 765cadcd-3ab8-4d24-bf69-9a2ed3d24959 /tidb/cdc/default/default/upstream/7365093093418089165 {"id":7365093093418089165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/765cadcd-3ab8-4d24-bf69-9a2ed3d24959 {"id":"765cadcd-3ab8-4d24-bf69-9a2ed3d24959","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819380} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4332eedcd3 765cadcd-3ab8-4d24-bf69-9a2ed3d24959 /tidb/cdc/default/default/upstream/7365093093418089165 {"id":7365093093418089165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2941.out cli changefeed create --start-ts=449529611256594433 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-3510?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Verifying downstream PD is started... Create changefeed successfully! 
ID: 6a38ed2c-5657-4aa6-89b1-f3c696c10eeb Info: {"upstream_id":7365093093418089165,"namespace":"default","id":"6a38ed2c-5657-4aa6-89b1-f3c696c10eeb","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-3510?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:43:06.102517755+08:00","start_ts":449529611256594433,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529611256594433,"checkpoint_ts":449529611256594433,"checkpoint_time":"2024-05-04 18:42:58.878"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x [Sat May 4 18:43:07 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:43:08 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:43:11 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccda7dc0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:5783, start at 2024-05-04 18:43:14.086664916 +0800 CST m=+5.124099303 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:14.093 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:14.090 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:14.090 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccda7dc0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:5783, start at 2024-05-04 18:43:14.086664916 +0800 CST m=+5.124099303 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:14.093 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:14.090 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:14.090 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccda95c0016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:5855, start at 2024-05-04 18:43:14.171623895 +0800 CST m=+5.157976629 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:14.178 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:14.135 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:14.135 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
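The `curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret` loops traced above for the row_format case (and repeated below for kafka_compression) start a cdc server and poll its /debug/info endpoint until the response contains "etcd info", giving up after 50 attempts with a 3-second sleep. A condensed sketch of the same wait; the log file, data dir, listen address and basic-auth credentials are copied from the log, and using the plain `cdc` binary instead of the coverage-instrumented cdc.test wrapper is an assumption for the sketch:

    #!/usr/bin/env bash
    # Start a TiCDC server and poll /debug/info until it reports etcd metadata,
    # mirroring the health-check loop in the trace.
    workdir=/tmp/tidb_cdc_test/row_format
    cdc server \
      --log-file "$workdir/cdc.log" \
      --log-level debug \
      --data-dir "$workdir/cdc_data" \
      --cluster-id default &

    for i in $(seq 1 50); do
      res=$(curl -vsL --max-time 20 --user ticdc:ticdc_secret \
            http://127.0.0.1:8300/debug/info || true)
      if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        exit 0
      fi
      sleep 3
    done
    echo "cdc server did not become ready after 50 attempts" >&2
    exit 1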
Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide [Sat May 4 18:43:17 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.71677169.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table row_format.finish_mark not exists for 1-th check, retry later * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC432824A56EB2 < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 10:43:17 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 18:43:17 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Exiting on signal: INTERRUPT table row_format.finish_mark not exists for 2-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:43:20 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e9e015d-ada2-4374-b39a-f367fdc2a37a {"id":"6e9e015d-ada2-4374-b39a-f367fdc2a37a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819397} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333421bc7 6e9e015d-ada2-4374-b39a-f367fdc2a37a /tidb/cdc/default/default/upstream/7365093174162711319 {"id":7365093174162711319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e9e015d-ada2-4374-b39a-f367fdc2a37a {"id":"6e9e015d-ada2-4374-b39a-f367fdc2a37a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819397} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333421bc7 6e9e015d-ada2-4374-b39a-f367fdc2a37a /tidb/cdc/default/default/upstream/7365093174162711319 {"id":7365093174162711319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e9e015d-ada2-4374-b39a-f367fdc2a37a {"id":"6e9e015d-ada2-4374-b39a-f367fdc2a37a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819397} 
/tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333421bc7 6e9e015d-ada2-4374-b39a-f367fdc2a37a /tidb/cdc/default/default/upstream/7365093174162711319 {"id":7365093174162711319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7230.out cli tso query --pd=http://127.0.0.1:2379 table row_format.finish_mark not exists for 3-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:43:21 CST 2024] <<<<<< run test case consistent_partition_table success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449529616974217220 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529616974217220 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7265.out cli changefeed create --start-ts=449529616974217220 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip Create changefeed successfully! ID: gzip Info: {"upstream_id":7365093174162711319,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-05-04T18:43:22.619004231+08:00","start_ts":449529616974217220,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-
alpha-86-g5bf93c6ca","resolved_ts":449529616974217220,"checkpoint_ts":449529616974217220,"checkpoint_time":"2024-05-04 18:43:20.689"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 26.92 secs (138439021 bytes/sec) [Pipeline] { [Pipeline] cache table row_format.finish_mark not exists for 4-th check, retry later + set +x [Sat May 4 18:43:24 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 18:43:22.575 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.614 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.736 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.744 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:23.711 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:23.720 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 18:43:22.575 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.614 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.736 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:22.744 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:23.711 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 18:43:23.720 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]") table test.gzip_finish_mark not exists for 1-th check, retry later find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table row_format.finish_mark not exists for 5-th check, retry later table test.gzip_finish_mark not exists for 2-th check, retry later table row_format.finish_mark not exists for 6-th check, retry later table test.gzip_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7396.out cli changefeed pause -c gzip start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table row_format.finish_mark not exists for 7-th check, retry later Verifying downstream PD is started... 
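The `[[: ... syntax error: operand expected` message above suggests kafka_compression/run.sh line 22 splices the raw multi-line output of a grep over cdc.log into an arithmetic `[[ ... ]]` test, so bash cannot parse the operand. A count-based comparison avoids that; the sketch below is a hypothetical rewrite, with the log path and message text taken from the log and the "at least one match" expectation an assumption about the script's intent:

    #!/usr/bin/env bash
    # Hypothetical replacement for the failing check: count producer log lines
    # mentioning the compression algorithm instead of embedding raw log text in
    # an arithmetic [[ ]] expression.
    algorithm=gzip
    log_file=/tmp/tidb_cdc_test/kafka_compression/cdc.log

    count=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" "$log_file" || true)
    count=${count:-0}
    if [[ $count -lt 1 ]]; then
      echo "no producer line advertising ${algorithm} compression found" >&2
      exit 1
    fi
    echo "found $count producer lines using ${algorithm} compression"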
+ set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7427.out cli changefeed remove -c gzip Changefeed remove successfully. ID: gzip CheckpointTs: 449529617878876198 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table row_format.finish_mark not exists for 8-th check, retry later + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7467.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x + tso='449529620133314561 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529620133314561 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7505.out cli changefeed create --start-ts=449529620133314561 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark not exists for 9-th check, retry later Create changefeed successfully! 
ID: snappy Info: {"upstream_id":7365093174162711319,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-05-04T18:43:34.609662834+08:00","start_ts":449529620133314561,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529620133314561,"checkpoint_ts":449529620133314561,"checkpoint_time":"2024-05-04 18:43:32.740"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
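Each compression case above follows the same CLI steps: query a start TSO from PD, create a changefeed whose Kafka sink URI carries the protocol and compression parameters, and later pause and remove it. A sketch of those steps with the plain `cdc cli` front end; the PD and Kafka endpoints, sink-URI parameters and the changefeed id "snappy" are copied from the log, while dropping the coverage-instrumented cdc.test wrapper is an assumption:

    #!/usr/bin/env bash
    # Sketch of the changefeed lifecycle driven by the log above.
    pd=http://127.0.0.1:2379

    # 1. Pick a start TSO so the changefeed begins from a known point in time.
    start_ts=$(cdc cli tso query --pd="$pd" | awk -F ' ' '{print $1}' | head -n1)

    # 2. Create the changefeed with a Kafka sink using canal-json + snappy compression.
    cdc cli changefeed create --pd="$pd" -c snappy --start-ts="$start_ts" \
      --sink-uri='kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy'

    # 3. Pause and remove it once the case has verified its finish mark.
    cdc cli changefeed pause  --pd="$pd" -c snappy
    cdc cli changefeed remove --pd="$pd" -c snappy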
+ set +x [Sat May 4 18:43:36 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 18:43:34.575 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.605 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.761 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.772 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:35.714 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:35.722 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 18:43:34.575 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.605 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.761 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:34.772 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:35.714 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 18:43:35.722 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]") table test.snappy_finish_mark not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table row_format.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:43:37 CST 2024] <<<<<< run test case row_format success! >>>>>> table test.snappy_finish_mark not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.snappy_finish_mark not exists for 3-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccf35400012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:2107, start at 2024-05-04 18:43:39.492736852 +0800 CST m=+5.081233918 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:39.500 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:39.472 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:39.472 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccf35400012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:2107, start at 2024-05-04 18:43:39.492736852 +0800 CST m=+5.081233918 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:39.500 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:39.472 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:39.472 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0ccf39f40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:2184, start at 2024-05-04 18:43:39.79789272 +0800 CST m=+5.330319905 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:45:39.806 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:43:39.773 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:33:39.773 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
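The "run.sh: line 22: [[: ... syntax error: operand expected" messages above (repeated later for the lz4 and zstd runs) are a bash error, not a TiCDC one: a [[ ... ]] numeric comparison was handed the raw matched log lines ("Kafka producer uses snappy compression algorithm", sarama.go:96) instead of a count, so the arithmetic operand could not be parsed. The case still reports success further down in this log, so the broken check is effectively a no-op here. A minimal repro of the error class and the usual fix (file name, variable names and the threshold are illustrative; the real comparison lives in kafka_compression/run.sh line 22):

    matches=$(grep "Kafka producer uses snappy compression algorithm" cdc.log)
    [[ $matches -ge 1 ]]    # bash: [[: <matched lines>: syntax error: operand expected
    count=$(grep -c "Kafka producer uses snappy compression algorithm" cdc.log)
    [[ $count -ge 1 ]]      # comparing a count works as a numeric operand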
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.snappy_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7637.out cli changefeed pause -c snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [Sat May 4 18:43:42 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.36133615.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7672.out cli changefeed remove -c snappy Changefeed remove successfully. ID: snappy CheckpointTs: 449529622571253763 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:43:45 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e266737-a247-4ee3-8f55-238e5fd2f3df {"id":"2e266737-a247-4ee3-8f55-238e5fd2f3df","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819423} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333a18cc9 2e266737-a247-4ee3-8f55-238e5fd2f3df /tidb/cdc/default/default/upstream/7365093283368464078 {"id":7365093283368464078,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e266737-a247-4ee3-8f55-238e5fd2f3df {"id":"2e266737-a247-4ee3-8f55-238e5fd2f3df","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819423} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333a18cc9 2e266737-a247-4ee3-8f55-238e5fd2f3df /tidb/cdc/default/default/upstream/7365093283368464078 {"id":7365093283368464078,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e266737-a247-4ee3-8f55-238e5fd2f3df {"id":"2e266737-a247-4ee3-8f55-238e5fd2f3df","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819423} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4333a18cc9 2e266737-a247-4ee3-8f55-238e5fd2f3df /tidb/cdc/default/default/upstream/7365093283368464078 {"id":7365093283368464078,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 002a2039-3627-43a8-a469-4117dd0ec41d Info: {"upstream_id":7365093283368464078,"namespace":"default","id":"002a2039-3627-43a8-a469-4117dd0ec41d","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T18:43:46.099086516+08:00","start_ts":449529622763405313,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529622763405313,"checkpoint_ts":449529622763405313,"checkpoint_time":"2024-05-04 18:43:42.773"} [Sat May 4 18:43:46 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7711.out cli tso query --pd=http://127.0.0.1:2379 + set +x + tso='449529623790223366 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529623790223366 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7740.out cli changefeed create --start-ts=449529623790223366 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4 Create changefeed successfully! 
ID: lz4 Info: {"upstream_id":7365093174162711319,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-05-04T18:43:48.610153942+08:00","start_ts":449529623790223366,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529623790223366,"checkpoint_ts":449529623790223366,"checkpoint_time":"2024-05-04 18:43:46.690"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
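The curl loop traced in the kafka_big_messages_v2 case above (and again for the drop_many_tables and multi_tables_ddl_v2 cases below) is the harness waiting for the freshly started cdc server to become ready: it polls http://127.0.0.1:8300/debug/info with basic auth up to 50 times, three seconds apart, until the response contains the "etcd info" section that is served once the owner has registered. Reconstructed as a sketch from the trace (the branches taken on "failed to get info:" and on exhausting the retries are assumptions; the trace only shows the checks themselves):

    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        echo "$res" | grep -q 'failed to get info:' && exit 1    # assumed: bail out on an explicit error
        echo "$res" | grep -q 'etcd info' && break               # server is up once owner/etcd metadata is served
        [ "$i" -eq 50 ] && exit 1                                # assumed: give up after the last attempt
        sleep 3
    done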
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 23.40 secs (159261707 bytes/sec) [Pipeline] { [Pipeline] cache + set +x [Sat May 4 18:43:50 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 18:43:48.576 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.606 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.761 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.769 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:49.714 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:49.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 18:43:48.576 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.606 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.761 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:48.769 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:49.714 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 18:43:49.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]") table test.lz4_finish_mark not exists for 1-th check, retry later table kafka_big_messages.test exists =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table test.lz4_finish_mark not exists for 2-th check, retry later check diff failed 1-th time, retry later table test.lz4_finish_mark not exists for 3-th check, retry later check diff failed 2-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table test.lz4_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7875.out cli changefeed pause -c lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... check diff successfully Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 1-th time... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 7.04 secs (529280030 bytes/sec) [Pipeline] { [Pipeline] cache wait process cdc.test exit for 2-th time... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7908.out cli changefeed remove -c lz4 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:43:58 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! >>>>>> Changefeed remove successfully. ID: lz4 CheckpointTs: 449529626241269764 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7941.out cli tso query --pd=http://127.0.0.1:2379 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449529627473346563 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529627473346563 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7973.out cli changefeed create --start-ts=449529627473346563 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd Create changefeed successfully! 
ID: zstd Info: {"upstream_id":7365093174162711319,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-05-04T18:44:02.609755126+08:00","start_ts":449529627473346563,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529627473346563,"checkpoint_ts":449529627473346563,"checkpoint_time":"2024-05-04 18:44:00.740"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
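Each "Create changefeed successfully!" line above is followed by the changefeed's full effective configuration as one JSON blob: sink protocol, filter rules, scheduler and consistency settings, plus the initial resolved_ts/checkpoint_ts, which equal the requested start-ts. When only a few fields matter while reading such a log, something like the following helps (a sketch: it assumes the create output was captured to create.log and that jq is available; neither is part of the test itself):

    grep -o '{.*}' create.log | jq '{id, protocol: .config.sink.protocol, start_ts, checkpoint_time}'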
+ set +x [Sat May 4 18:44:04 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 18:44:02.578 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.605 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.762 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.770 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:03.715 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:03.723 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 18:44:02.578 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.605 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.762 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:02.770 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:03.715 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 18:44:03.723 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]") table test.zstd_finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd0c0a40013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:4297, start at 2024-05-04 18:44:04.798638373 +0800 CST m=+5.317510918 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:04.805 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:04.777 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:04.777 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.zstd_finish_mark not exists for 2-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd0c0a40013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:4297, start at 2024-05-04 18:44:04.798638373 +0800 CST m=+5.317510918 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:04.805 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:04.777 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:04.777 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd0c2200007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:4370, start at 2024-05-04 18:44:04.87908487 +0800 CST m=+5.336445262 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:04.885 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:04.872 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:04.872 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
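The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps printed while each cluster boots are the tikv_gc_* bookkeeping rows TiDB keeps in the mysql.tidb table. tikv_gc_life_time and tikv_gc_safe_point matter for CDC because every changefeed in this log is created with check_gc_safe_point enabled, so its start-ts must not fall behind the GC safe point. The same rows can be read directly (a sketch; host, port and user are assumptions about the test clusters, not values taken from this log):

    mysql -h 127.0.0.1 -P 4000 -u root -e \
        "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc_%'"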
Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.zstd_finish_mark not exists for 3-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 8.79 secs (423818872 bytes/sec) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5721.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] { [Pipeline] cache table test.zstd_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8096.out cli changefeed pause -c zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... + set +x + tso='449529629933830145 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529629933830145 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:44:11 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.57585760.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8132.out cli changefeed remove -c zstd Changefeed remove successfully. 
ID: zstd CheckpointTs: 449529629911285769 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:44:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8839c772-3501-4ddc-b207-a65c301525c0 {"id":"8839c772-3501-4ddc-b207-a65c301525c0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819451} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43340b6cd4 8839c772-3501-4ddc-b207-a65c301525c0 /tidb/cdc/default/default/upstream/7365093397111086189 {"id":7365093397111086189,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8839c772-3501-4ddc-b207-a65c301525c0 {"id":"8839c772-3501-4ddc-b207-a65c301525c0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819451} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43340b6cd4 8839c772-3501-4ddc-b207-a65c301525c0 /tidb/cdc/default/default/upstream/7365093397111086189 {"id":7365093397111086189,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8839c772-3501-4ddc-b207-a65c301525c0 {"id":"8839c772-3501-4ddc-b207-a65c301525c0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819451} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43340b6cd4 8839c772-3501-4ddc-b207-a65c301525c0 /tidb/cdc/default/default/upstream/7365093397111086189 
{"id":7365093397111086189,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5816.out cli changefeed create --start-ts=449529629933830145 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-23362?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:44:15 CST 2024] <<<<<< run test case kafka_compression success! >>>>>> Create changefeed successfully! ID: ecc23bc3-2650-4b7a-8c8a-142ea567c924 Info: {"upstream_id":7365093397111086189,"namespace":"default","id":"ecc23bc3-2650-4b7a-8c8a-142ea567c924","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-23362?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:44:16.084923357+08:00","start_ts":449529629933830145,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529629933830145,"checkpoint_ts":449529629933830145,"checkpoint_time":"2024-05-04 18:44:10.126"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 18:44:17 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>> table drop_tables.c not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
table drop_tables.c not exists for 2-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table drop_tables.c not exists for 3-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table drop_tables.c not exists for 4-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table drop_tables.c not exists for 5-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:44:27 CST 2024] <<<<<< run test case kafka_messages success! >>>>>> table drop_tables.c exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:44:30 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd239d80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:6067, start at 2024-05-04 18:44:28.94857906 +0800 CST m=+5.042822281 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:28.956 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:28.918 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:28.918 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd239d80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:6067, start at 2024-05-04 18:44:28.94857906 +0800 CST m=+5.042822281 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:28.956 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:28.918 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:28.918 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd23c280015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:6151, start at 2024-05-04 18:44:29.108831743 +0800 CST m=+5.146838410 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:29.115 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:29.116 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:29.116 +0800 All versions after safe point can be accessed. (DO NOT EDIT) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 18:44:32 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.75227524.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:44:36 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/85cd57da-feb3-487f-ae53-8fa2af45a5d9 {"id":"85cd57da-feb3-487f-ae53-8fa2af45a5d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819473} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433466feca 85cd57da-feb3-487f-ae53-8fa2af45a5d9 /tidb/cdc/default/default/upstream/7365093499728287989 {"id":7365093499728287989,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/85cd57da-feb3-487f-ae53-8fa2af45a5d9 {"id":"85cd57da-feb3-487f-ae53-8fa2af45a5d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819473} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433466feca 85cd57da-feb3-487f-ae53-8fa2af45a5d9 /tidb/cdc/default/default/upstream/7365093499728287989 {"id":7365093499728287989,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/85cd57da-feb3-487f-ae53-8fa2af45a5d9 {"id":"85cd57da-feb3-487f-ae53-8fa2af45a5d9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819473} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433466feca 85cd57da-feb3-487f-ae53-8fa2af45a5d9 /tidb/cdc/default/default/upstream/7365093499728287989 {"id":7365093499728287989,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: test-normal Info: {"upstream_id":7365093499728287989,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-5678?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:44:36.296598903+08:00","start_ts":449529635921723395,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529635921723395,"checkpoint_ts":449529635921723395,"checkpoint_time":"2024-05-04 18:44:32.968"} Create changefeed successfully! 
ID: test-error-1 Info: {"upstream_id":7365093499728287989,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-19284?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:44:36.495660795+08:00","start_ts":449529635921723395,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529635921723395,"checkpoint_ts":449529635921723395,"checkpoint_time":"2024-05-04 18:44:32.968"} Create changefeed successfully! 
ID: test-error-2 Info: {"upstream_id":7365093499728287989,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-6506?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:44:36.688060991+08:00","start_ts":449529635921723395,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529635921723395,"checkpoint_ts":449529635921723395,"checkpoint_time":"2024-05-04 18:44:32.968"} [Sat May 4 18:44:36 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 18:44:36 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 18:44:36 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 25.10 secs (148473821 bytes/sec) [Pipeline] { [Pipeline] cache Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table multi_tables_ddl_test.t55 not exists for 1-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table multi_tables_ddl_test.t55 not exists for 2-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table multi_tables_ddl_test.t55 not exists for 3-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table multi_tables_ddl_test.t55 not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd338f00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:9372, start at 2024-05-04 18:44:45.261971532 +0800 CST m=+5.188259540 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:45.269 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:45.244 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:45.244 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table multi_tables_ddl_test.t55 exists table multi_tables_ddl_test.t66 exists table multi_tables_ddl_test.t7 exists table multi_tables_ddl_test.t88 exists table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 7.96 secs (468383425 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd338f00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:9372, start at 2024-05-04 18:44:45.261971532 +0800 CST m=+5.188259540 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:45.269 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:45.244 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:45.244 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd339ac000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:9454, start at 2024-05-04 18:44:45.300226017 +0800 CST m=+5.171368352 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:45.307 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:45.291 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:45.291 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
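The changefeeds logged above (test-normal, test-error-1, test-error-2) are created through the cdc CLI with an open-protocol Kafka sink URI and per-changefeed filter rules. A minimal sketch of an equivalent invocation, assuming only the flags and URI parameters visible in the log; the config file path and the trimmed rule list are illustrative, not the test's actual files:

# illustrative changefeed config carrying a subset of the filter rules seen above
cat > /tmp/changefeed-test-normal.toml <<'EOF'
[filter]
rules = ["multi_tables_ddl_test.t1", "multi_tables_ddl_test.t2", "multi_tables_ddl_test.finish_mark"]
EOF

# create the changefeed against the upstream PD, pointing the sink at the Kafka topic
cdc cli changefeed create \
  --pd=http://127.0.0.1:2379 \
  --changefeed-id=test-normal \
  --config=/tmp/changefeed-test-normal.toml \
  --sink-uri='kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-5678?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'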
table multi_tables_ddl_test.finish_mark not exists for 2-th check, retry later [Sat May 4 18:44:50 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1076510767.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table multi_tables_ddl_test.finish_mark exists check table exists success + endpoints=http://127.0.0.1:2379 + changefeed_id=test-normal + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s + info='{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449529637678350371, "checkpoint_time": "2024-05-04 18:44:39.669", "error": null }' + echo '{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449529637678350371, "checkpoint_time": "2024-05-04 18:44:39.669", "error": null }' { "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449529637678350371, "checkpoint_time": "2024-05-04 18:44:39.669", "error": null } ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449529637678350371, '"checkpoint_time":' '"2024-05-04' '18:44:39.669",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449529637678350371, '"checkpoint_time":' '"2024-05-04' '18:44:39.669",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-1 + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s + info='{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449529640889352219, "checkpoint_time": "2024-05-04 18:44:51.918", "error": null }' + echo '{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449529640889352219, "checkpoint_time": "2024-05-04 18:44:51.918", "error": null }' { "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449529640889352219, "checkpoint_time": "2024-05-04 18:44:51.918", "error": null } ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449529640889352219, '"checkpoint_time":' '"2024-05-04' '18:44:51.918",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449529640889352219, '"checkpoint_time":' '"2024-05-04' '18:44:51.918",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-2 + expected_state=failed + error_msg=ErrSyncRenameTableFailed + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s + info='{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449529637206229029, "checkpoint_time": "2024-05-04 18:44:37.868", "error": { "time": "2024-05-04T18:44:40.026537188+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' + echo '{ "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449529637206229029, "checkpoint_time": "2024-05-04 18:44:37.868", "error": { "time": "2024-05-04T18:44:40.026537188+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' { "upstream_id": 7365093499728287989, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449529637206229029, "checkpoint_time": "2024-05-04 18:44:37.868", "error": { "time": "2024-05-04T18:44:40.026537188+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." 
} } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449529637206229029, '"checkpoint_time":' '"2024-05-04' '18:44:37.868",' '"error":' '{' '"time":' '"2024-05-04T18:44:40.026537188+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365093499728287989, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449529637206229029, '"checkpoint_time":' '"2024-05-04' '18:44:37.868",' '"error":' '{' '"time":' '"2024-05-04T18:44:40.026537188+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.' + [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]] check diff successfully Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > wait process cdc.test exit for 1-th time... 
< HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:44:53 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80a520b2-9487-4ff7-9afc-2da4cb6aca89 {"id":"80a520b2-9487-4ff7-9afc-2da4cb6aca89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819490} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334a213d2 80a520b2-9487-4ff7-9afc-2da4cb6aca89 /tidb/cdc/default/default/upstream/7365093563529592253 {"id":7365093563529592253,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80a520b2-9487-4ff7-9afc-2da4cb6aca89 {"id":"80a520b2-9487-4ff7-9afc-2da4cb6aca89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819490} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334a213d2 80a520b2-9487-4ff7-9afc-2da4cb6aca89 /tidb/cdc/default/default/upstream/7365093563529592253 {"id":7365093563529592253,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/80a520b2-9487-4ff7-9afc-2da4cb6aca89 {"id":"80a520b2-9487-4ff7-9afc-2da4cb6aca89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819490} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334a213d2 80a520b2-9487-4ff7-9afc-2da4cb6aca89 /tidb/cdc/default/default/upstream/7365093563529592253 {"id":7365093563529592253,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 18:44:53 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>> wait process cdc.test exit for 2-th time... 
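The curl exchanges above are the harness waiting for the cdc server to come up: in the kafka_sink_error_resume case the server is started with GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' so the first Kafka send fails once, and the script then polls http://127.0.0.1:8300/debug/info with basic auth until the response contains 'etcd info', sleeping 3 seconds between attempts and giving up after 50 tries. A sketch of that polling loop, with the endpoint and credentials taken from the trace and the error handling simplified:

for ((i = 0; i <= 50; i++)); do
  # probe the cdc debug endpoint with the basic-auth credentials from the trace
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1) || true
  if echo "$res" | grep -q 'etcd info'; then
    break            # the capture has registered itself in etcd, so the server is ready
  fi
  if [ "$i" -eq 50 ]; then
    echo "cdc server did not become ready in time"
    exit 1
  fi
  sleep 3            # matches the 3-second back-off between attempts in the trace
done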
check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 642 0 --:--:-- --:--:-- --:--:-- 646 + info='{"state":"normal","resolved_ts":449529641341812738,"checkpoint_ts":449529641341812738}' + echo '{"state":"normal","resolved_ts":449529641341812738,"checkpoint_ts":449529641341812738}' {"state":"normal","resolved_ts":449529641341812738,"checkpoint_ts":449529641341812738} ++ echo '{"state":"normal","resolved_ts":449529641341812738,"checkpoint_ts":449529641341812738}' ++ jq -r .state + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 18:44:54 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 771 0 --:--:-- --:--:-- --:--:-- 767 100 86 100 86 0 0 770 0 --:--:-- --:--:-- --:--:-- 767 + info='{"state":"normal","resolved_ts":449529641420455979,"checkpoint_ts":449529641420455979}' + echo '{"state":"normal","resolved_ts":449529641420455979,"checkpoint_ts":449529641420455979}' {"state":"normal","resolved_ts":449529641420455979,"checkpoint_ts":449529641420455979} ++ echo '{"state":"normal","resolved_ts":449529641420455979,"checkpoint_ts":449529641420455979}' ++ jq -r .state + state=normal + [[ ! 
normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 2-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2182 0 --:--:-- --:--:-- --:--:-- 2198 + info='{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .last_warning.message + error_msg='kafka sink injected error' + [[ ! 
kafka sink injected error =~ kafka ]] run task successfully check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 1798 0 --:--:-- --:--:-- --:--:-- 1807 + info='{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449529642770497544,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd41eac0002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:6990, start at 2024-05-04 18:44:59.947442411 +0800 CST m=+5.161167685 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:59.954 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:59.947 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:59.947 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd41eac0002 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:6990, start at 2024-05-04 18:44:59.947442411 +0800 CST m=+5.161167685 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:46:59.954 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:44:59.947 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:34:59.947 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd435600003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:7074, start at 2024-05-04 18:45:01.400795778 +0800 CST m=+6.560445202 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:01.406 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:01.400 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:01.400 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
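The check_changefeed_status calls in this part of the log poll the open API endpoint /api/v2/changefeeds/<id>/status and compare the reported state with the expected one, retrying until the kafka_sink_error_resume changefeed first reaches warning (carrying the injected "kafka sink injected error") and later returns to normal. A simplified sketch of such a check, not the actual helper from the tiflow test library; the retry wrapper and the sleep interval are illustrative:

check_state() {
  # simplified stand-in for check_changefeed_status: compare reported state with expected
  local endpoint=$1 changefeed_id=$2 expected=$3
  local state
  state=$(curl -s "http://${endpoint}/api/v2/changefeeds/${changefeed_id}/status" | jq -r .state)
  if [ "$state" != "$expected" ]; then
    echo "changefeed state ${state} does not equal to ${expected}"
    return 1
  fi
}

for attempt in $(seq 1 5); do
  if check_state 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal; then
    echo "run task successfully"
    break
  fi
  echo "run task failed ${attempt}-th time, retry later"
  sleep 2
done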
Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2179 0 --:--:-- --:--:-- --:--:-- 2178 100 244 100 244 0 0 2177 0 --:--:-- --:--:-- --:--:-- 2178 + info='{"state":"warning","resolved_ts":449529643832180739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449529643832180739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449529643832180739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449529643832180739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 2-th time, retry later [Sat May 4 18:45:05 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.85318533.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 15.13 secs (246323902 bytes/sec) [Pipeline] { [Pipeline] cache check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2192 0 --:--:-- --:--:-- --:--:-- 2198 + info='{"state":"warning","resolved_ts":449529644880756739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449529644880756739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449529644880756739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449529644880756739,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 3-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:45:08 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f844a3b-1ae9-4ac4-b634-18be675b8797 {"id":"4f844a3b-1ae9-4ac4-b634-18be675b8797","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819505} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334dbb2cf 4f844a3b-1ae9-4ac4-b634-18be675b8797 /tidb/cdc/default/default/upstream/7365093636103105026 {"id":7365093636103105026,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f844a3b-1ae9-4ac4-b634-18be675b8797 {"id":"4f844a3b-1ae9-4ac4-b634-18be675b8797","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819505} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334dbb2cf 4f844a3b-1ae9-4ac4-b634-18be675b8797 /tidb/cdc/default/default/upstream/7365093636103105026 {"id":7365093636103105026,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f844a3b-1ae9-4ac4-b634-18be675b8797 {"id":"4f844a3b-1ae9-4ac4-b634-18be675b8797","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819505} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4334dbb2cf 4f844a3b-1ae9-4ac4-b634-18be675b8797 /tidb/cdc/default/default/upstream/7365093636103105026 {"id":7365093636103105026,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 18:45:08 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>> table processor_stop_delay.t not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table processor_stop_delay.t not exists for 2-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2 Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table processor_stop_delay.t exists check diff failed 1-th time, retry later check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2183 0 --:--:-- --:--:-- --:--:-- 2198 + info='{"state":"warning","resolved_ts":449529646453620740,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449529646453620740,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449529646453620740,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449529646453620740,"checkpoint_ts":449529641446670375,"last_warning":{"time":"2024-05-04T18:44:57.701647886+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 4-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 2-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 13.05 secs (285646500 bytes/sec) [Pipeline] { [Pipeline] cache check diff failed 3-th time, retry later check_changefeed_status 127.0.0.1:8300 d250acc0-a71b-4b1c-8f08-29c0ddfccb57 normal + endpoint=127.0.0.1:8300 + changefeed_id=d250acc0-a71b-4b1c-8f08-29c0ddfccb57 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/d250acc0-a71b-4b1c-8f08-29c0ddfccb57/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 784 0 --:--:-- --:--:-- --:--:-- 788 + info='{"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740}' + echo '{"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740}' {"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740} ++ echo '{"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] + [[ -z '' ]] ++ echo '{"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740}' ++ jq -r .last_error + error_msg=null + [[ ! null == \n\u\l\l ]] ++ echo '{"state":"normal","resolved_ts":449529648550772740,"checkpoint_ts":449529648550772740}' ++ jq -r .last_warning + error_msg=null + [[ ! null == \n\u\l\l ]] + exit 0 run task successfully table kafka_sink_error_resume.t1 exists table kafka_sink_error_resume.t2 exists check diff successfully check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd57da40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:9134, start at 2024-05-04 18:45:22.447032008 +0800 CST m=+5.946588610 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:22.455 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:22.459 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:22.459 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 4-th time, retry later check diff successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd57da40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:9134, start at 2024-05-04 18:45:22.447032008 +0800 CST m=+5.946588610 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:22.455 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:22.459 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:22.459 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd57e540007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-92hlc-1tj2z, pid:9222, start at 2024-05-04 18:45:22.461111819 +0800 CST m=+5.904964455 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:22.467 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:22.453 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:22.453 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:45:25 CST 2024] <<<<<< run test case kafka_sink_error_resume success! >>>>>> check diff failed 5-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10615.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 6-th time, retry later + set +x + tso='449529650127568897 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529650127568897 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 18:45:28 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1065010652.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:45:31 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:45:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/aabdd9e8-3535-453c-8f9e-ac48c3826919 {"id":"aabdd9e8-3535-453c-8f9e-ac48c3826919","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819528} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335346dd9 aabdd9e8-3535-453c-8f9e-ac48c3826919 /tidb/cdc/default/default/upstream/7365093729655863440 {"id":7365093729655863440,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/aabdd9e8-3535-453c-8f9e-ac48c3826919 {"id":"aabdd9e8-3535-453c-8f9e-ac48c3826919","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819528} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335346dd9 aabdd9e8-3535-453c-8f9e-ac48c3826919 /tidb/cdc/default/default/upstream/7365093729655863440 {"id":7365093729655863440,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + 
grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/aabdd9e8-3535-453c-8f9e-ac48c3826919 {"id":"aabdd9e8-3535-453c-8f9e-ac48c3826919","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819528} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335346dd9 aabdd9e8-3535-453c-8f9e-ac48c3826919 /tidb/cdc/default/default/upstream/7365093729655863440 {"id":7365093729655863440,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10711.out cli changefeed create --start-ts=449529650127568897 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml Create changefeed successfully! ID: 19e5c811-649b-4ae2-b7bb-6580bcf3f15a Info: {"upstream_id":7365093729655863440,"namespace":"default","id":"19e5c811-649b-4ae2-b7bb-6580bcf3f15a","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-04T18:45:32.213748341+08:00","start_ts":449529650127568897,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529650127568897,"checkpoint_ts":449529650127568897,"checkpoint_time":"2024-05-04 18:45:27.159"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... 
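The trace above is the standard create flow these cases share: grab the current TSO from PD with "cdc cli tso query", then pass it as --start-ts to "cdc cli changefeed create" together with the Kafka sink URI and the per-case changefeed.toml. A minimal standalone sketch of that flow, assuming a plain cdc binary on PATH rather than the coverage-instrumented cdc.test wrapper used by this job, and the same PD and Kafka addresses as in the log:

#!/usr/bin/env bash
set -euo pipefail

PD=http://127.0.0.1:2379
SINK_URI='kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1'
CONF=tests/integration_tests/multi_topics_v2/conf/changefeed.toml

# "cli tso query" prints the TSO first; keep only the first field of the first line.
start_ts=$(cdc cli tso query --pd="$PD" | head -n1 | awk '{print $1}')

cdc cli changefeed create \
    --pd="$PD" \
    --start-ts="$start_ts" \
    --sink-uri="$SINK_URI" \
    --config="$CONF"

The dispatcher rule visible in the echoed JSON ("topic":"{schema}_{table}" for test.*) is what fans tables out to one Kafka topic per table, which is the behaviour multi_topics_v2 exercises.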
+ set +x =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_lost_callback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:45:37 CST 2024] <<<<<< run test case mq_sink_lost_callback success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/mq_sink_dispatcher Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 22.27 secs (167397335 bytes/sec) [Pipeline] { [Pipeline] cache Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/partition_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd78154000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:12325, start at 2024-05-04 18:45:55.429464954 +0800 CST m=+5.234897334 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:55.436 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:55.413 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:55.413 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd78154000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:12325, start at 2024-05-04 18:45:55.429464954 +0800 CST m=+5.234897334 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:55.436 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:55.413 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:55.413 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd782d00015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:12413, start at 2024-05-04 18:45:55.554065357 +0800 CST m=+5.306733145 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:47:55.561 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:45:55.559 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:35:55.559 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
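The ERROR 2003 retries followed by the VARIABLE_NAME / VARIABLE_VALUE / COMMENT dump above are the "Verifying ... TiDB is started" step: the harness polls until the mysql client can connect, then prints the bootstrap and GC variables. A minimal sketch of that check, assuming the mysql client is on PATH and the upstream TiDB listens on 127.0.0.1:4000 (an assumption about the harness default); the rows shown in the log come from the mysql.tidb system table.

#!/usr/bin/env bash
set -u

HOST=127.0.0.1
PORT=4000    # assumption: upstream TiDB port used by the test harness

for i in $(seq 1 60); do
    if mysql -h "$HOST" -P "$PORT" -u root -e 'SELECT 1' >/dev/null 2>&1; then
        break
    fi
    echo "TiDB not ready yet ($i-th try), retry later"
    sleep 1
done

# Dump the same bootstrapped / tikv_gc_* rows that appear in the log.
mysql -h "$HOST" -P "$PORT" -u root -e 'SELECT * FROM mysql.tidb'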
Logging trace to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 18:46:00 CST 2024] <<<<<< START cdc server in mq_sink_dispatcher case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.1380513807.out server --log-file /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc.log --log-level info --data-dir /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:46:03 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bdec3109-7dcd-4675-b185-553142559dc7 {"id":"bdec3109-7dcd-4675-b185-553142559dc7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819560} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335b3face bdec3109-7dcd-4675-b185-553142559dc7 /tidb/cdc/default/default/upstream/7365093864469984373 {"id":7365093864469984373,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bdec3109-7dcd-4675-b185-553142559dc7 {"id":"bdec3109-7dcd-4675-b185-553142559dc7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819560} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335b3face bdec3109-7dcd-4675-b185-553142559dc7 /tidb/cdc/default/default/upstream/7365093864469984373 {"id":7365093864469984373,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/bdec3109-7dcd-4675-b185-553142559dc7 {"id":"bdec3109-7dcd-4675-b185-553142559dc7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819560} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335b3face bdec3109-7dcd-4675-b185-553142559dc7 /tidb/cdc/default/default/upstream/7365093864469984373 {"id":7365093864469984373,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13881.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0cd7f3dc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:10174, start at 2024-05-04 18:46:02.757803457 +0800 CST m=+5.120086980 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:02.764 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:02.743 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:02.743 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd7f3dc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:10174, start at 2024-05-04 18:46:02.757803457 +0800 CST m=+5.120086980 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:02.764 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:02.743 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:02.743 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cd7f628000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-lcj6v-rkbch, pid:10249, start at 2024-05-04 18:46:02.900283096 +0800 CST m=+5.207195141 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:02.908 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:02.890 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:02.890 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449529659828207620 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529659828207620 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13918.out cli changefeed create --start-ts=449529659828207620 '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/changefeed.toml Create changefeed successfully! 
ID: test Info: {"upstream_id":7365093864469984373,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T18:46:06.048290554+08:00","start_ts":449529659828207620,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["verify.t"],"partition":"index-value"},{"matcher":["dispatcher.index"],"partition":"index-value","index":"idx_a"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529659828207620,"checkpoint_ts":449529659828207620,"checkpoint_time":"2024-05-04 18:46:04.164"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... 
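The changefeed named test is created against conf/changefeed.toml, whose dispatcher rules show up in the echoed JSON as two entries: verify.t partitioned by index-value, and dispatcher.index partitioned by index-value on index idx_a. A hedged reconstruction of what that config most likely contains, written as a here-document (the TOML key names are inferred from the JSON above, not copied from the repo file):

cat > /tmp/dispatcher-changefeed.toml <<'EOF'
[sink]
dispatchers = [
    { matcher = ['verify.t'], partition = "index-value" },
    { matcher = ['dispatcher.index'], partition = "index-value", index = "idx_a" },
]
EOF

Because the table dispatcher.index has no index named idx_a, this configuration is expected to drive the changefeed into the failed state with CDC:ErrDispatcherFailed, which is exactly what the checks further down wait for before the config is updated and the changefeed resumed.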
+ pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11646.out cli tso query --pd=http://127.0.0.1:2379 + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null }' + echo '{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null }' { "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null } ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529659828207620, '"checkpoint_time":' '"2024-05-04' '18:46:04.164",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529659828207620, '"checkpoint_time":' '"2024-05-04' '18:46:04.164",' '"error":' null '}' ++ jq -r .error.message + set +x + tso='449529660373204993 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529660373204993 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:46:07 CST 2024] <<<<<< START cdc server in partition_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1168311685.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + message=null + [[ ! 
null =~ null ]] run task successfully check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null }' + echo '{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null }' { "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529659828207620, "checkpoint_time": "2024-05-04 18:46:04.164", "error": null } ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529659828207620, '"checkpoint_time":' '"2024-05-04' '18:46:04.164",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 1-th time, retry later check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null }' + echo '{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null }' { "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null } ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529660810985490, '"checkpoint_time":' '"2024-05-04' '18:46:07.913",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 2-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:46:10 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54ab2545-a321-4c28-b6e3-a3bd16121ed1 {"id":"54ab2545-a321-4c28-b6e3-a3bd16121ed1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335d326ce 54ab2545-a321-4c28-b6e3-a3bd16121ed1 /tidb/cdc/default/default/upstream/7365093903028278099 {"id":7365093903028278099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54ab2545-a321-4c28-b6e3-a3bd16121ed1 {"id":"54ab2545-a321-4c28-b6e3-a3bd16121ed1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335d326ce 54ab2545-a321-4c28-b6e3-a3bd16121ed1 /tidb/cdc/default/default/upstream/7365093903028278099 {"id":7365093903028278099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/54ab2545-a321-4c28-b6e3-a3bd16121ed1 {"id":"54ab2545-a321-4c28-b6e3-a3bd16121ed1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4335d326ce 54ab2545-a321-4c28-b6e3-a3bd16121ed1 /tidb/cdc/default/default/upstream/7365093903028278099 {"id":7365093903028278099,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11738.out cli changefeed create --start-ts=449529660373204993 '--sink-uri=kafka://127.0.0.1:9092/ticdc-partition-table-test-20356?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 8406e1ef-58cd-45b2-b742-ff001f11ca6e Info: {"upstream_id":7365093903028278099,"namespace":"default","id":"8406e1ef-58cd-45b2-b742-ff001f11ca6e","sink_uri":"kafka://127.0.0.1:9092/ticdc-partition-table-test-20356?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:46:11.31407365+08:00","start_ts":449529660373204993,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529660373204993,"checkpoint_ts":449529660373204993,"checkpoint_time":"2024-05-04 18:46:06.243"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
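Both changefeed creations in this stretch happen only after the readiness probe traced above succeeds: curl against http://127.0.0.1:8300/debug/info with the ticdc:ticdc_secret basic-auth credentials, retried until the response contains "etcd info" (a "failed to get info:" message aborts the wait instead). A condensed standalone sketch of that loop, assuming the default listen address and credentials shown in the trace:

#!/usr/bin/env bash
set -u

STATUS_URL=http://127.0.0.1:8300/debug/info

for i in $(seq 0 50); do
    # Same probe as the trace: basic auth, short timeout, follow redirects.
    res=$(curl -vsL --max-time 20 --user ticdc:ticdc_secret "$STATUS_URL" 2>/dev/null || true)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error, giving up"
        exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        exit 0
    fi
    sleep 3
done
echo "cdc server did not become ready in time"
exit 1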
+ set +x [Sat May 4 18:46:12 CST 2024] <<<<<< START kafka consumer in partition_table case >>>>>> check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": { "time": "2024-05-04T18:46:11.545326246+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' + echo '{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": { "time": "2024-05-04T18:46:11.545326246+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' { "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": { "time": "2024-05-04T18:46:11.545326246+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } } ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449529660810985490, '"checkpoint_time":' '"2024-05-04' '18:46:07.913",' '"error":' '{' '"time":' '"2024-05-04T18:46:11.545326246+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' ++ jq -r .state + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449529660810985490, '"checkpoint_time":' '"2024-05-04' '18:46:07.913",' '"error":' '{' '"time":' '"2024-05-04T18:46:11.545326246+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' ++ jq -r .error.message + message='[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a' + [[ ! 
[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14178.out cli changefeed update -c test '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/new_changefeed.toml --no-confirm Diff of changefeed config: {Type:update Path:[Config SyncPointInterval] From: To:0xc00391dd08} {Type:update Path:[Config SyncPointRetention] From: To:0xc00391dd18} {Type:update Path:[Config Sink DispatchRules 0 Matcher 0] From:verify.t To:dispatcher.index} {Type:delete Path:[Config Sink DispatchRules 1 Matcher 0] From:dispatcher.index To:} {Type:delete Path:[Config Sink DispatchRules 1 PartitionRule] From:index-value To:} {Type:delete Path:[Config Sink DispatchRules 1 IndexName] From:idx_a To:} {Type:update Path:[Config Consistent] From: To:0xc001338310} Update changefeed config successfully! ID: test Info: {"upstream_id":7365093864469984373,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T18:46:06.048290554+08:00","start_ts":449529659828207620,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","dispatchers":[{"matcher":["dispatcher.index"],"partition":"index-value"}],"encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"failed","error":{"addr":"127.0.0.1:8300","code":"CDC:ErrDispatcherFailed","message":"[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"},"creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":0,"checkpoint_ts":449529660810985490,"checkpoint_time":"2024-05-04 18:46:07.913"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14212.out cli changefeed resume -c test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 27.70 secs (134533399 bytes/sec) [Pipeline] { [Pipeline] cache PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... 
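Every check_changefeed_state invocation in this log follows the same shape: query the changefeed with "cdc cli changefeed query ... -c <id> -s", extract .state and .error.message with jq, and let an outer loop re-run the whole check until it passes (the "run task failed N-th time, retry later" lines). A condensed sketch of that pattern; the retry wrapper below is illustrative, not the test suite's actual helper name:

#!/usr/bin/env bash
set -u

PD=http://127.0.0.1:2379

check_changefeed_state() {
    local changefeed_id=$1 expected_state=$2
    local info state error_msg
    info=$(cdc cli changefeed query --pd="$PD" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        return 1
    fi
    error_msg=$(echo "$info" | jq -r .error.message)
    if [[ "$expected_state" == "normal" && "$error_msg" != "null" ]]; then
        echo "changefeed reports error: $error_msg"
        return 1
    fi
}

# Hypothetical retry wrapper mirroring the "retry later" loop in the log.
retry_until_ok() {
    local i
    for i in $(seq 1 30); do
        if "$@"; then
            echo "run task successfully"
            return 0
        fi
        echo "run task failed $i-th time, retry later"
        sleep 2
    done
    return 1
}

# After the config update and resume above, the test expects the changefeed
# to come back to the normal state.
retry_until_ok check_changefeed_state test normal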
+ set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null }' + echo '{ "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null }' { "upstream_id": 7365093864469984373, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449529660810985490, "checkpoint_time": "2024-05-04 18:46:07.913", "error": null } ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529660810985490, '"checkpoint_time":' '"2024-05-04' '18:46:07.913",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365093864469984373, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449529660810985490, '"checkpoint_time":' '"2024-05-04' '18:46:07.913",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully table test.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 2-th check, retry later table test.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:46:25 CST 2024] <<<<<< run test case mq_sink_dispatcher success! >>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 6.82 secs (546888451 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... 
Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh table partition_table.t exists table partition_table.t1 exists table partition_table.t2 not exists for 1-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh table partition_table.t2 exists table partition_table.finish_mark not exists for 1-th check, retry later [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] } [Pipeline] sh /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] // container [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G04 Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=cab94ea3-8273-4750-9204-dfe65e5498f5 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-rfvt7 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 pingcap_tiflow_pull_cdc_integration_kafka_test_1843-rfvt7 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table partition_table.finish_mark not exists for 2-th check, retry later table partition_table.finish_mark not exists for 3-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
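Annotation: the repeated "Waiting for zookeeper/kafka" traces earlier in this excerpt are a readiness probe that each parallel test pod runs before its group starts: TCP port checks with nc, then a ZooKeeper "dump" four-letter command to confirm that broker id 1 has registered. A rough standalone equivalent of that probe, using the same localhost ports and broker id as the trace; the retry loops are added here for illustration (the trace shows single invocations):

  #!/usr/bin/env bash
  set -euo pipefail
  echo "Waiting for zookeeper to be ready..."
  until nc -z localhost 2181; do sleep 1; done
  echo "Waiting for kafka to be ready..."
  until nc -z localhost 9092; do sleep 1; done
  echo "Waiting for kafka-broker to be ready..."
  # ask ZooKeeper for its session dump and look for the registered broker path
  until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
      sleep 1
  done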
table partition_table.finish_mark not exists for 4-th check, retry later Verifying downstream PD is started... table partition_table.finish_mark not exists for 5-th check, retry later /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh: line 1: 14283 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/new_changefeed.toml" 2>&1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table partition_table.finish_mark not exists for 6-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
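Annotation: the check_changefeed_state trace near the top of this excerpt boils down to querying the changefeed through the CLI and asserting on two jq fields. A condensed sketch of that check with the same endpoint and expectations as the trace (state "normal", error message matching "null"); the real helper in the test utilities also takes a tls_dir argument and switches to TLS flags when the endpoint is https, which this sketch omits:

  check_changefeed_state() {
      local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
      local info state message
      info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
      state=$(echo "$info" | jq -r .state)
      message=$(echo "$info" | jq -r .error.message)
      [[ "$state" == "$expected_state" ]] || { echo "unexpected state: $state"; return 1; }
      [[ "$message" =~ $error_msg ]] || { echo "unexpected error message: $message"; return 1; }
      echo "run task successfully"
  }
  check_changefeed_state http://127.0.0.1:2379 test normal null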
[Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.finish_mark not exists for 7-th check, retry later [Pipeline] { [Pipeline] { [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] // timeout [Pipeline] // timeout [Pipeline] sh [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G01 Run cases: open_protocol_handle_key_only PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=a727e074-923f-4ae8-abf6-6b356674efa0 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-9prpt GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 pingcap_tiflow_pull_cdc_integration_kafka_test_1843-9prpt GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table partition_table.finish_mark not exists for 8-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table partition_table.finish_mark exists check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:46:46 CST 2024] <<<<<< run test case partition_table success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/open_protocol_handle_key_only Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cda83940013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:1414, start at 2024-05-04 18:46:44.753151235 +0800 CST m=+5.206603559 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:44.761 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:44.760 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:44.760 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cda83940013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:1414, start at 2024-05-04 18:46:44.753151235 +0800 CST m=+5.206603559 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:44.761 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:44.760 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:44.760 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cda85e40002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:1491, start at 2024-05-04 18:46:44.857954371 +0800 CST m=+5.256198143 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:44.864 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:44.857 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:44.857 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
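Annotation: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT tables printed while "Verifying Upstream/Downstream TiDB is started" look like the contents of the mysql.tidb bootstrap table, and the interleaved ERROR 2003 lines are the mysql client failing while the server is still coming up. A plausible shape for such a readiness check (this is an assumption about the helper script, not a quote from it), assuming TiDB's default port 4000:

  # hypothetical readiness loop: succeed once TiDB answers a query on mysql.tidb
  i=0
  while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;' 2>/dev/null; do
      i=$((i + 1))
      if [ "$i" -ge 60 ]; then
          echo "TiDB did not become ready in time" >&2
          exit 1
      fi
      sleep 1
  done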
Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2835.out cli tso query --pd=http://127.0.0.1:2379 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x + tso='449529671898890242 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529671898890242 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:46:51 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28692871.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdae724000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:15419, start at 2024-05-04 18:46:51.093600823 +0800 CST m=+5.164317991 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:51.102 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:51.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:51.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdae724000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:15419, start at 2024-05-04 18:46:51.093600823 +0800 CST m=+5.164317991 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:51.102 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:51.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:51.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdae9640016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vgp2b-16vbh, pid:15500, start at 2024-05-04 18:46:51.251366547 +0800 CST m=+5.254914794 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:51.259 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:51.225 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:51.225 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
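Annotation: starting a cdc server in each case follows the pattern visible in the traces above: launch cdc.test server in the background, then poll http://127.0.0.1:8300/debug/info with basic auth (ticdc:ticdc_secret) until the body contains "etcd info", treating "failed to get info:" as a hard error and giving up after 50 attempts. A trimmed sketch of that polling loop with the same endpoint and markers as the trace:

  get_info_fail_msg='failed to get info:'
  etcd_info_msg='etcd info'
  for ((i = 0; i <= 50; i++)); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret) || true
      if echo "$res" | grep -q "$get_info_fail_msg"; then
          echo "cdc server reported: $get_info_fail_msg" >&2
          exit 1
      fi
      if echo "$res" | grep -q "$etcd_info_msg"; then
          break        # server is up and registered in etcd
      fi
      if ((i == 50)); then
          echo "cdc server did not become ready" >&2
          exit 1
      fi
      sleep 3
  done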
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 18:46:54 CST 2024] <<<<<< START cdc server in kafka_column_selector case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.1696216964.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:46:54 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/07d47343-be18-4482-9531-bde408f67083 {"id":"07d47343-be18-4482-9531-bde408f67083","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819612} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433676b0d7 07d47343-be18-4482-9531-bde408f67083 /tidb/cdc/default/default/upstream/7365094074810588274 {"id":7365094074810588274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/07d47343-be18-4482-9531-bde408f67083 {"id":"07d47343-be18-4482-9531-bde408f67083","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819612} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433676b0d7 07d47343-be18-4482-9531-bde408f67083 /tidb/cdc/default/default/upstream/7365094074810588274 {"id":7365094074810588274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/07d47343-be18-4482-9531-bde408f67083 {"id":"07d47343-be18-4482-9531-bde408f67083","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819612} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433676b0d7 07d47343-be18-4482-9531-bde408f67083 /tidb/cdc/default/default/upstream/7365094074810588274 {"id":7365094074810588274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2928.out cli changefeed create --start-ts=449529671898890242 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-13599?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 59c03bfa-686d-43fc-96f6-7a0beb5d041f Info: {"upstream_id":7365094074810588274,"namespace":"default","id":"59c03bfa-686d-43fc-96f6-7a0beb5d041f","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-13599?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:46:55.251686665+08:00","start_ts":449529671898890242,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529671898890242,"checkpoint_ts":449529671898890242,"checkpoint_time":"2024-05-04 18:46:50.210"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
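Annotation: the foreign_key case above shows the standard two-step changefeed bootstrap: obtain a start-ts from PD with "cdc cli tso query", then pass it to "changefeed create" together with a Kafka sink URI. A compact sketch of those two steps, using the same PD endpoint and a sink URI of the shape seen above; the topic name here is arbitrary and the TSO parsing is simplified to taking the first field of the first output line:

  # 1. ask PD (via cdc) for a current TSO to use as the changefeed start point
  start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
  # 2. create the changefeed against a Kafka sink using the open protocol
  cdc cli changefeed create --start-ts="$start_ts" \
      --sink-uri="kafka://127.0.0.1:9092/ticdc-foreign-key-test?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"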
+ set +x [Sat May 4 18:46:56 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:46:57 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d0ac4045-73f1-4c83-99d0-c3a9876a3a71 {"id":"d0ac4045-73f1-4c83-99d0-c3a9876a3a71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819614} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43368dd3c5 d0ac4045-73f1-4c83-99d0-c3a9876a3a71 /tidb/cdc/default/default/upstream/7365094113210679789 {"id":7365094113210679789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d0ac4045-73f1-4c83-99d0-c3a9876a3a71 {"id":"d0ac4045-73f1-4c83-99d0-c3a9876a3a71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819614} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43368dd3c5 d0ac4045-73f1-4c83-99d0-c3a9876a3a71 /tidb/cdc/default/default/upstream/7365094113210679789 {"id":7365094113210679789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d0ac4045-73f1-4c83-99d0-c3a9876a3a71 {"id":"d0ac4045-73f1-4c83-99d0-c3a9876a3a71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819614} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43368dd3c5 d0ac4045-73f1-4c83-99d0-c3a9876a3a71 /tidb/cdc/default/default/upstream/7365094113210679789 {"id":7365094113210679789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.cli.17026.out cli changefeed create --start-ts=449529672979185665 '--sink-uri=kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true' -c test 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/conf/changefeed.toml Create changefeed successfully! ID: test Info: {"upstream_id":7365094113210679789,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json\u0026partition-num=1\u0026enable-tidb-extension=true","create_time":"2024-05-04T18:46:57.912280819+08:00","start_ts":449529672979185665,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.t1"],"columns":["a","b"]},{"matcher":["test.*"],"columns":["*","!b"]},{"matcher":["test1.t1"],"columns":["column*","!column1"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529672979185665,"checkpoint_ts":449529672979185665,"checkpoint_time":"2024-05-04 18:46:54.331"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdb4dd80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5, pid:1411, start at 2024-05-04 18:46:57.693267232 +0800 CST m=+5.217880517 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:57.701 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-18:46:57.704 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:57.704 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdb4dd80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5, pid:1411, start at 2024-05-04 18:46:57.693267232 +0800 CST m=+5.217880517 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:57.701 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:57.704 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:57.704 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdb4f200013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-9prpt-cmcx5, pid:1500, start at 2024-05-04 18:46:57.76571708 +0800 CST m=+5.236774255 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:48:57.774 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:46:57.736 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:36:57.736 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } \033[0;36m<<< Run all test success >>>\033[0m + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2856.out cli tso query --pd=http://127.0.0.1:2379 Starting build checksum checker... 
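Annotation: the kafka_column_selector changefeed created earlier in this excerpt was driven by a config file whose effect shows up as the "column_selectors" array echoed back in the create response (test.t1 keeps a and b, test.* drops b, test1.t1 keeps column* except column1). A sketch of what such a config could look like, written the way these tests generate config files; the TOML key spelling column-selectors under [sink] is an assumption inferred from the JSON field name, not copied from the test's conf file:

  cat >changefeed.toml <<'EOF'
  [sink]
  column-selectors = [
      { matcher = ["test.t1"],  columns = ["a", "b"] },
      { matcher = ["test.*"],   columns = ["*", "!b"] },
      { matcher = ["test1.t1"], columns = ["column*", "!column1"] },
  ]
  EOF
  cdc cli changefeed create -c test \
      --sink-uri="kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true" \
      --config=changefeed.toml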
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading go.uber.org/zap v1.27.0 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/containerd/cgroups v1.0.4 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading golang.org/x/text v0.14.0 go: downloading cloud.google.com/go v0.112.2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading 
github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 18.47 secs (201794830 bytes/sec) [Pipeline] { [Pipeline] cache go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading google.golang.org/api v0.170.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading go.opentelemetry.io/otel/metric v1.24.0 
go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/go-logr/stdr v1.2.2 + set +x + tso='449529675292082177 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529675292082177 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:47:04 CST 2024] <<<<<< START cdc server in open_protocol_handle_key_only case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.28932895.out server --log-file /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table foreign_key.finish_mark not exists for 1-th check, retry later go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/remyoudompheng/bigfft 
v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/kr/text v0.2.0 table foreign_key.finish_mark not exists for 2-th check, retry later go: downloading github.com/jmespath/go-jmespath v0.4.0 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:07 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dcac6b5c-0ddf-4a19-a935-93c359bebdd7 {"id":"dcac6b5c-0ddf-4a19-a935-93c359bebdd7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4336a962d3 dcac6b5c-0ddf-4a19-a935-93c359bebdd7 /tidb/cdc/default/default/upstream/7365094142069679001 {"id":7365094142069679001,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dcac6b5c-0ddf-4a19-a935-93c359bebdd7 {"id":"dcac6b5c-0ddf-4a19-a935-93c359bebdd7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4336a962d3 dcac6b5c-0ddf-4a19-a935-93c359bebdd7 /tidb/cdc/default/default/upstream/7365094142069679001 {"id":7365094142069679001,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/dcac6b5c-0ddf-4a19-a935-93c359bebdd7 
{"id":"dcac6b5c-0ddf-4a19-a935-93c359bebdd7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4336a962d3 dcac6b5c-0ddf-4a19-a935-93c359bebdd7 /tidb/cdc/default/default/upstream/7365094142069679001 {"id":7365094142069679001,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2952.out cli changefeed create --start-ts=449529675292082177 '--sink-uri=kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol&max-message-bytes=800&kafka-version=2.4.1' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/conf/changefeed.toml go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 Create changefeed successfully! ID: 6a4b8774-650b-4deb-8658-b039c08eec73 Info: {"upstream_id":7365094142069679001,"namespace":"default","id":"6a4b8774-650b-4deb-8658-b039c08eec73","sink_uri":"kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol\u0026max-message-bytes=800\u0026kafka-version=2.4.1","create_time":"2024-05-04T18:47:08.202970225+08:00","start_ts":449529675292082177,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_inte
rval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529675292082177,"checkpoint_ts":449529675292082177,"checkpoint_time":"2024-05-04 18:47:03.154"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... table foreign_key.finish_mark not exists for 3-th check, retry later + set +x table test.finish_mark not exists for 1-th check, retry later table foreign_key.finish_mark not exists for 4-th check, retry later table test.finish_mark not exists for 2-th check, retry later table foreign_key.finish_mark not exists for 5-th check, retry later table test.finish_mark exists check diff failed 1-th time, retry later table foreign_key.finish_mark not exists for 6-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc) 3727233024 bytes in 10.51 secs (354791877 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] sh check diff failed 2-th time, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 table foreign_key.finish_mark not exists for 7-th check, retry later [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] sh [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G14 Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=1a89f4dd-4363-4e21-9283-1a2a30a2c573 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** 
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-bsrn3 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt pingcap_tiflow_pull_cdc_integration_kafka_test_1843-bsrn3 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
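The Kafka readiness probes a few lines above (nc port checks followed by a ZooKeeper dump) boil down to the pipeline below. This is a sketch of what the trace shows, assuming the single-broker setup of this job where the broker registers itself as id 1:

# Check that ZooKeeper and Kafka are listening, then confirm broker 1 has
# registered its ephemeral node under /brokers/ids in ZooKeeper.
nc -z localhost 2181 && echo "zookeeper port open"
nc -z localhost 9092 && echo "kafka port open"
echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1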
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G07 Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=d24926b3-1f20-4bb4-8f7c-415256a375f2 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-hwpd5 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x pingcap_tiflow_pull_cdc_integration_kafka_test_1843-hwpd5 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh check diff failed 3-th time, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G06 Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=2cce5664-5014-4808-91a1-eb6a59d38d25 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-kz8q3 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-kz8q3 pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
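The curl trace against http://127.0.0.1:8300/debug/info earlier in this log is the cdc server readiness loop: up to 50 attempts, 3 seconds apart, accepting the server only once the response contains "etcd info". A simplified sketch of that loop as suggested by the trace (error handling reduced to the two grep checks it performs):

# Wait for the cdc server's debug endpoint to report etcd info.
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error" >&2
        exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        break
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time" >&2
        exit 1
    fi
    sleep 3
done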
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=0188f705-315a-4008-80c3-de296966cd9b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-0ps0r GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-0ps0r pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh table foreign_key.finish_mark not exists for 8-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G15 Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=579734e5-e246-478b-8e72-ae082b430ed3 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vzvzj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-vzvzj pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
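For reference, the changefeed created earlier for open_protocol_handle_key_only pins the open-protocol encoder in the Kafka sink URI and uses a deliberately small max-message-bytes so oversized rows exercise the handle-key-only large-message path declared in the case's changefeed.toml. A sketch of that invocation, reusing the start_ts captured above; $CASE_DIR is a hypothetical placeholder for the case directory:

# Create the changefeed against the local Kafka broker with open-protocol encoding.
cdc.test cli changefeed create \
    --start-ts="$start_ts" \
    --sink-uri='kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol&max-message-bytes=800&kafka-version=2.4.1' \
    --config="$CASE_DIR/conf/changefeed.toml"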
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=477548ab-4ae4-4a37-8986-b777e9a5f865 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test 
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-shhrm GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-shhrm pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<================= [Pipeline] sh The 1 times to try to start tidb cluster... + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G05 Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=80d26ff6-975c-46e0-965a-97b2985f0f6a BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dnxvj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dnxvj GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
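Each of the parallel shards above runs the same preamble: wipe /tmp/tidb_cdc_test, then hand its group label (G05, G06, G07, ...) to run_group.sh, which maps the label to the case list printed as "Run cases: ...". A sketch of that per-shard dispatch, assuming TEST_GROUP is exported by the pipeline as shown in the environment dumps:

# Prepare a clean workspace and run this shard's slice of the kafka integration suite.
rm -rf /tmp/tidb_cdc_test
mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka "$TEST_GROUP"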
[Pipeline] // container [Pipeline] // container [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=10c40458-d602-4450-a252-b2d9a3f7cd14 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dqmt8 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s pingcap_tiflow_pull_cdc_integration_kafka_test_1843-dqmt8 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dqmt8-35c8s GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:47:20 CST 2024] <<<<<< skip test case clustered_index for kafka! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:47:20 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! 
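Each executor above runs ./tests/integration_tests/run_group.sh <sink-type> <group>, which expands the group id into a case list (the "Run cases: ..." line) and invokes each case's run.sh in turn. The following is only a minimal sketch of that dispatch, assuming a hard-coded group map taken from the "Run cases:" lines in this log; the real run_group.sh in the tiflow repository defines the groups itself.

    #!/usr/bin/env bash
    # Minimal sketch of a group runner (illustration only, not the tiflow script).
    set -eu
    sink_type=$1   # e.g. kafka
    group=$2       # e.g. G17

    # Hypothetical group map, copied from the "Run cases:" lines above.
    declare -A groups=(
      [G17]="clustered_index processor_resolved_ts_fallback"
      [G10]="default_value simple cdc_server_tips event_filter sql_mode"
    )

    echo "Run cases: ${groups[$group]}"
    for tc in ${groups[$group]}; do
      bash "tests/integration_tests/${tc}/run.sh" "$sink_type"
    done

Cases that do not support the requested sink exit early, which is why the log immediately prints "skip test case clustered_index for kafka!" before moving on to the next case.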
>>>>>> [Pipeline] sh check diff failed 4-th time, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G10 Run cases: default_value simple cdc_server_tips event_filter sql_mode PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=fccdeaf7-34de-4a4f-9c96-b427fddfddc5 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G10 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-xgxq5 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-xgxq5 pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: kafka... <<================= [Pipeline] // container [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G09 Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=28f8de5d-4a95-42a5-bb79-ce71cf0aa2bc BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
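The recurring message find: '/tmp/tidb_cdc_test/*/*': No such file or directory is harmless start-up noise: before a case has created its working directory the glob matches nothing, so the shell passes the literal pattern through to find, which then reports it as a missing path. The helper's exact purpose is an assumption here (it appears to scan previous case directories), but the failure mode is just:

    # With no case directories present, the unexpanded glob reaches find verbatim
    # and find prints the "No such file or directory" error seen in the log.
    find /tmp/tidb_cdc_test/*/*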
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-jrfkf GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 pingcap_tiflow_pull_cdc_integration_kafka_test_1843-jrfkf GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table foreign_key.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... 
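The foreign_key case above finishes with the usual three-step epilogue: wait for the case's finish-mark table to appear downstream, run the data-consistency check ("check diff successfully"), then wait for the cdc.test process to exit. A rough sketch of that epilogue, with host, port and credentials assumed for illustration:

    # Poll the downstream TiDB until the finish mark shows up.
    while ! mysql -h 127.0.0.1 -P 3306 -u root -e 'SELECT 1 FROM foreign_key.finish_mark LIMIT 1' >/dev/null 2>&1; do
      sleep 1
    done
    echo "table foreign_key.finish_mark exists"

    # Then wait for cdc.test to terminate, echoing progress like the log does.
    i=1
    while pgrep -x cdc.test >/dev/null 2>&1; do
      echo "wait process cdc.test exit for ${i}-th time..."
      i=$((i + 1))
      sleep 1
    done
    echo "process cdc.test already exit"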
+ rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G11 start tidb cluster in /tmp/tidb_cdc_test/sink_retry Starting Upstream PD... wait process cdc.test exit for 2-th time... [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc already exists) Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=667c0545-0c5a-4b26-9988-6cf8798014f4 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 
TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-zthb7 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 pingcap_tiflow_pull_cdc_integration_kafka_test_1843-zthb7 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... <<================= Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:47:22 CST 2024] <<<<<< run test case foreign_key success! >>>>>> [Pipeline] } check diff failed 5-th time, retry later [Pipeline] } start tidb cluster in /tmp/tidb_cdc_test/tiflash Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... \033[0;36m<<< Run all test success >>>\033[0m Verifying downstream PD is started... [Pipeline] // cache [Pipeline] } Verifying downstream PD is started... [Pipeline] // timeout [Pipeline] } [Pipeline] // timeout Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc already exists) [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // container [Pipeline] sh check diff failed 6-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint Starting Upstream PD... 
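"check diff failed N-th time, retry later" comes from a bounded retry loop around the upstream/downstream comparison: the check keeps retrying until the changefeed catches up and it prints "check diff successfully", or it gives up after a fixed number of attempts. A minimal sketch of such a loop, in which WORK_DIR, the sync_diff_inspector invocation and the retry budget are all assumptions:

    max_retries=60
    i=0
    until sync_diff_inspector --config="$WORK_DIR/diff_config.toml" >/dev/null 2>&1; do
      i=$((i + 1))
      if [ "$i" -ge "$max_retries" ]; then
        echo "check diff failed at last"
        exit 1
      fi
      echo "check diff failed ${i}-th time, retry later"
      sleep 2
    done
    echo "check diff successfully"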
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] // cache [Pipeline] // container [Pipeline] } [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=85ffbbb5-3044-47aa-b2f0-508289474612 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-nnjsj GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf pingcap_tiflow_pull_cdc_integration_kafka_test_1843-nnjsj GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... <<================= Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
[Pipeline] // withCredentials [Pipeline] } [Pipeline] // dir TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] } [Pipeline] // timeout [Pipeline] } Verifying downstream PD is started... [Pipeline] // withCredentials [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // timeout [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // container [Pipeline] } Verifying downstream PD is started... Starting Upstream TiDB... [Pipeline] // stage [Pipeline] } [Pipeline] // withEnv Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] } Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // container [Pipeline] } check diff failed 7-th time, retry later [Pipeline] // node [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // node [Pipeline] } Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release The 1 times to try to start tidb cluster... 
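The odd-looking line "The 1 times to try to start tidb cluster..." is printed by a retry wrapper around cluster startup: each attempt announces its ordinal, and the whole bring-up (PD, TiKV, TiDB, TiFlash) is retried if it fails. The sketch below only shows that shape; the start_tidb_cluster helper name and its flag are assumptions:

    for attempt in 1 2 3; do
      echo "The ${attempt} times to try to start tidb cluster..."
      if start_tidb_cluster --workdir "$WORK_DIR"; then
        break
      fi
    done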
+ rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=77d328ba-2501-4ebd-a790-6ed9b1f9c95d JENKINS_NODE_COOKIE=07776d1f-16bb-4fcc-a382-b507de11bea3 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786704691812372484","prowjobid":"77d328ba-2501-4ebd-a790-6ed9b1f9c95d","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/5bf93c6caedff315c4c9650d80e951e31bc88a3d","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1843 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786704691812372484 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1843/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1843-cn5n8 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh pingcap_tiflow_pull_cdc_integration_kafka_test_1843-cn5n8 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1843-cn5n8-5n6zh GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1843 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:47:27 CST 2024] <<<<<< run test case owner_resign success! >>>>>> [Pipeline] // withEnv [Pipeline] } [Pipeline] // podTemplate [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // stage [Pipeline] } [Pipeline] // withEnv [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // stage [Pipeline] } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore The 1 times to try to start tidb cluster... Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore check diff failed 8-th time, retry later The 1 times to try to start tidb cluster... Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
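The repeated "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines are expected during startup: the harness polls the freshly started TiDB with a trivial query, and the mysql client prints this error on every attempt until the server accepts connections, at which point the "Verifying ... TiDB is started..." step passes. Roughly, with host, port and user assumed:

    for _ in $(seq 1 60); do
      # Until TiDB is listening, the mysql client prints the ERROR 2003 line seen above.
      if mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null; then
        echo "Upstream TiDB is started"
        break
      fi
      sleep 1
    done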
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 9-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:47:31 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/resolve_lock Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/default_value Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 10-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd765c000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x, pid:1292, start at 2024-05-04 18:47:33.020620634 +0800 CST m=+5.231927919 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.029 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.015 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.015 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd765c000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x, pid:1292, start at 2024-05-04 18:47:33.020620634 +0800 CST m=+5.231927919 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.029 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.015 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.015 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd770c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-hwpd5-mn85x, pid:1373, start at 2024-05-04 18:47:33.084155593 +0800 CST m=+5.243516692 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.090 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.059 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.059 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd76cc000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt, pid:1355, start at 2024-05-04 18:47:33.054813042 +0800 CST m=+5.102963545 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.064 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.043 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.043 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd76cc000c Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt, pid:1355, start at 2024-05-04 18:47:33.054813042 +0800 CST m=+5.102963545 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.064 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.043 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.043 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd78fc0010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-bsrn3-75hrt, pid:1417, start at 2024-05-04 18:47:33.198670465 +0800 CST m=+5.189337447 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.205 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.183 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.183 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd81e80003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k, pid:1398, start at 2024-05-04 18:47:33.755218643 +0800 CST m=+5.303168831 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.763 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.754 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.754 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd80000014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3, pid:1290, start at 2024-05-04 18:47:33.674596806 +0800 CST m=+5.156812338 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.683 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.682 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.682 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd80000014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3, pid:1290, start at 2024-05-04 18:47:33.674596806 +0800 CST m=+5.156812338 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.683 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.682 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.682 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd826c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-kz8q3-1pch3, pid:1369, start at 2024-05-04 18:47:33.828046786 +0800 CST m=+5.255136142 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.837 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.837 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.837 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_hang/run.sh using Sink-Type: kafka... <<================= [Sat May 4 18:47:35 CST 2024] <<<<<< run test case sink_hang success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd66f80005 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp, pid:1349, start at 2024-05-04 18:47:32.034785483 +0800 CST m=+5.260332823 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:32.043 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:32.030 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:32.030 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd66f80005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp, pid:1349, start at 2024-05-04 18:47:32.034785483 +0800 CST m=+5.260332823 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:32.043 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:32.030 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:32.030 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd67a40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-0ps0r-q3pvp, pid:1430, start at 2024-05-04 18:47:32.109421802 +0800 CST m=+5.280388619 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:32.118 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:32.123 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:32.123 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed at last A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. 
The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... 
equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... 
Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... 
_____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' A total of 2 tables need to be compared Comparing the table structure of ``test`.`t`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/1 Comparing the table structure of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... Comparing the table data of ``test`.`finish_mark`` ... _____________________________________________________________________________ Progress [>------------------------------------------------------------] 0% 0/2 Comparing the table data of ``test`.`finish_mark`` ... equivalent Comparing the table data of ``test`.`t`` ... _____________________________________________________________________________ Progress [==============================>------------------------------] 50% 0/1 Comparing the table data of ``test`.`t`` ... 
failure _____________________________________________________________________________ Progress [============================================================>] 100% 0/0 Progress [============================================================>] 100% 0/0 The data of `test`.`t` is not equal The rest of tables are all equal. A total of 2 tables have been compared, 1 tables finished, 1 tables failed, 0 tables skipped. The patch file has been generated in '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/fix-on-tidb0/' You can view the comparision details through '/tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log' cat: /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff/output/sync_diff.log: No such file or directory ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Sat May 4 18:47:36 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28142816.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd9138000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p, pid:1350, start at 2024-05-04 18:47:34.749882446 +0800 CST m=+5.189709075 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.756 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.734 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.734 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 18:47:36 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.28052807.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd81e80003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k, pid:1398, start at 2024-05-04 18:47:33.755218643 +0800 CST m=+5.303168831 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.763 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.754 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.754 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd81f40005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-shhrm-fhl4k, pid:1479, start at 2024-05-04 18:47:33.762015237 +0800 CST m=+5.256882710 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:33.771 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:33.757 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:33.757 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2799.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdda650000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1, pid:1288, start at 2024-05-04 18:47:36.094509615 +0800 CST m=+5.267632548 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:36.102 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:36.084 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:36.084 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd85fc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz, pid:1398, start at 2024-05-04 18:47:34.065078631 +0800 CST m=+5.207439625 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.071 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.065 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.065 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd85fc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz, pid:1398, start at 2024-05-04 18:47:34.065078631 +0800 CST m=+5.207439625 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.071 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.065 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.065 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd87700015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-vzvzj-mv0bz, pid:1476, start at 2024-05-04 18:47:34.133485306 +0800 CST m=+5.216682471 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.140 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.108 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.108 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
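Each "Verifying Upstream/Downstream TiDB is started..." message above is followed either by an ERROR 2003 connection failure or by another dump of the mysql.tidb rows, which is the shape of a poll-until-ready loop. A condensed sketch of that pattern (a hypothetical helper, not the test harness's actual implementation; host and port assumed from this job):

i=0
until mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb"; do
    i=$((i + 1))
    if [ "$i" -gt 60 ]; then
        echo "TiDB did not become ready in time" >&2
        exit 1
    fi
    sleep 1   # ERROR 2003 simply means the server is not accepting connections yet
done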
Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Starting Upstream TiDB... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd9138000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p, pid:1350, start at 2024-05-04 18:47:34.749882446 +0800 CST m=+5.189709075 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.756 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.734 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.734 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdd91340014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-dnxvj-w926p, pid:1436, start at 2024-05-04 18:47:34.765354857 +0800 CST m=+5.149787157 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:34.772 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:34.733 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:34.733 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
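The "cli tso query" invocations above ask PD for a current TSO, typically used as a changefeed start-ts; because the coverage-instrumented cdc.test binary appends a "PASS coverage: ..." trailer to its output, the harness strips it with awk, as the later "+ awk" trace shows. A minimal sketch of that shell pattern (the coverage file path here is a placeholder):

# query a start-ts from PD and keep only the first field of the output
start_ts=$(cdc.test -test.coverprofile=/tmp/cov.cli.out cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' '{print $1}')
echo "using start-ts ${start_ts}"
# the value is then typically handed to 'cdc cli changefeed create --start-ts=${start_ts} ...'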
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449529684212580353 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529684212580353 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "insertproportion"="0" "dotransactions"="false" "recordcount"="10" "scanproportion"="0" "operationcount"="0" "readproportion"="0" "mysql.port"="4000" "mysql.db"="sink_retry" "requestdistribution"="uniform" "workload"="core" "threadcount"="2" "mysql.host"="127.0.0.1" "readallfields"="true" "mysql.user"="root" "updateproportion"="0" ********************************************** + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2801.out cli tso query --pd=http://127.0.0.1:2379 Run finished, takes 10.799204ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1639.8, Avg(us): 2061, Min(us): 1178, Max(us): 4574, 95th(us): 5000, 99th(us): 5000 [Sat May 4 18:47:38 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.28572859.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdda650000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1, pid:1288, start at 2024-05-04 18:47:36.094509615 +0800 CST m=+5.267632548 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:36.102 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-18:47:36.084 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:36.084 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cdda66c000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-jrfkf-h5db1, pid:1375, start at 2024-05-04 18:47:36.10517661 +0800 CST m=+5.214250074 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:36.112 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:36.091 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:36.091 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:39 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3330d47a-7959-4c38-bc11-d38d1630ed59 {"id":"3330d47a-7959-4c38-bc11-d38d1630ed59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433731eec4 3330d47a-7959-4c38-bc11-d38d1630ed59 /tidb/cdc/default/default/upstream/7365094292491239703 {"id":7365094292491239703,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3330d47a-7959-4c38-bc11-d38d1630ed59 {"id":"3330d47a-7959-4c38-bc11-d38d1630ed59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433731eec4 3330d47a-7959-4c38-bc11-d38d1630ed59 /tidb/cdc/default/default/upstream/7365094292491239703 {"id":7365094292491239703,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3330d47a-7959-4c38-bc11-d38d1630ed59 {"id":"3330d47a-7959-4c38-bc11-d38d1630ed59","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} 
/tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433731eec4 3330d47a-7959-4c38-bc11-d38d1630ed59 /tidb/cdc/default/default/upstream/7365094292491239703 {"id":7365094292491239703,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:39 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f8561294-5473-4bd1-a0a0-734c89b2fcac {"id":"f8561294-5473-4bd1-a0a0-734c89b2fcac","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373330c8 f8561294-5473-4bd1-a0a0-734c89b2fcac /tidb/cdc/default/default/upstream/7365094283488510489 {"id":7365094283488510489,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f8561294-5473-4bd1-a0a0-734c89b2fcac {"id":"f8561294-5473-4bd1-a0a0-734c89b2fcac","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373330c8 f8561294-5473-4bd1-a0a0-734c89b2fcac /tidb/cdc/default/default/upstream/7365094283488510489 {"id":7365094283488510489,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f8561294-5473-4bd1-a0a0-734c89b2fcac {"id":"f8561294-5473-4bd1-a0a0-734c89b2fcac","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819656} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373330c8 f8561294-5473-4bd1-a0a0-734c89b2fcac /tidb/cdc/default/default/upstream/7365094283488510489 {"id":7365094283488510489,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + 
break + set +x [Sat May 4 18:47:39 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>> [Sat May 4 18:47:39 CST 2024] <<<<<< START kafka consumer in changefeed_finish case >>>>>> [Sat May 4 18:47:39 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.28352837.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 18:47:33 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 1 [Sat May 4 18:47:33 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 2 [Sat May 4 18:47:33 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 3 table test.table1 not exists for 1-th check, retry later table test.table1 not exists for 2-th check, retry later table test.table1 exists table test.table2 exists table test.table3 exists check diff successfully table test.table10 not exists for 1-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table test.table10 exists table test.table20 exists ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 18:47:39 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.28712873.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff successfully + set +x + tso='449529684729266179 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' 
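Each `<<<<<< START cdc server ... >>>>>>` block above follows the same readiness pattern: launch `cdc.test ... server` in the background, then poll the HTTP debug endpoint with basic auth until the capture/owner metadata registered in etcd shows up, giving up after 50 attempts. A condensed sketch of the loop that produces the curl traces seen here (user, secret and port are the values visible in the log):

  curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
  for (( i = 0; i <= 50; i++ )); do
    res=$($curl_status_cmd)                                  # empty while the port still refuses connections
    if echo "$res" | grep -q 'failed to get info:'; then exit 1; fi
    if echo "$res" | grep -q 'etcd info'; then break; fi     # server is up and registered in etcd
    [ "$i" -eq 50 ] && exit 1
    sleep 3
  done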
+ echo 449529684729266179 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:47:40 CST 2024] <<<<<< START cdc server in tiflash case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.28412843.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 18:47:41 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28062808.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
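Several cases start the server with a non-empty GO_FAILPOINTS value, e.g. sink_retry injects MySQLSinkTxnRandomError=25%return(true) and gc_safepoint shortens the safepoint update interval with InjectGcSafepointUpdateInterval=return(500). The failpoints are compiled into the test binary and are activated purely through this environment variable, so enabling one is just a matter of exporting it before launching the server; a sketch adapted from the gc_safepoint invocation above (coverage file name simplified):

  # inject a failpoint only for this cdc server instance
  GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' \
    cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.out server \
      --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug \
      --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data \
      --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &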
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:41 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/829f8295-7bc5-4180-8cc2-2366cf96fc8a {"id":"829f8295-7bc5-4180-8cc2-2366cf96fc8a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4337340ae9 829f8295-7bc5-4180-8cc2-2366cf96fc8a /tidb/cdc/default/default/upstream/7365094285929857287 {"id":7365094285929857287,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/829f8295-7bc5-4180-8cc2-2366cf96fc8a {"id":"829f8295-7bc5-4180-8cc2-2366cf96fc8a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4337340ae9 829f8295-7bc5-4180-8cc2-2366cf96fc8a /tidb/cdc/default/default/upstream/7365094285929857287 {"id":7365094285929857287,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/829f8295-7bc5-4180-8cc2-2366cf96fc8a {"id":"829f8295-7bc5-4180-8cc2-2366cf96fc8a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f4337340ae9 829f8295-7bc5-4180-8cc2-2366cf96fc8a /tidb/cdc/default/default/upstream/7365094285929857287 {"id":7365094285929857287,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2910.out cli changefeed create --start-ts=449529684212580353 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-16344?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:42 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/119147dd-aa17-4339-8b00-bd7da5834528 {"id":"119147dd-aa17-4339-8b00-bd7da5834528","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373588cc 119147dd-aa17-4339-8b00-bd7da5834528 /tidb/cdc/default/default/upstream/7365094296724957012 {"id":7365094296724957012,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/119147dd-aa17-4339-8b00-bd7da5834528 {"id":"119147dd-aa17-4339-8b00-bd7da5834528","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373588cc 119147dd-aa17-4339-8b00-bd7da5834528 /tidb/cdc/default/default/upstream/7365094296724957012 {"id":7365094296724957012,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/119147dd-aa17-4339-8b00-bd7da5834528 {"id":"119147dd-aa17-4339-8b00-bd7da5834528","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819659} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373588cc 119147dd-aa17-4339-8b00-bd7da5834528 /tidb/cdc/default/default/upstream/7365094296724957012 {"id":7365094296724957012,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! 
ID: 97351673-ac96-4bc8-9908-79b5cd7e04ca Info: {"upstream_id":7365094285929857287,"namespace":"default","id":"97351673-ac96-4bc8-9908-79b5cd7e04ca","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-16344?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:42.453678732+08:00","start_ts":449529684212580353,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529684212580353,"checkpoint_ts":449529684212580353,"checkpoint_time":"2024-05-04 18:47:37.183"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: 83e3f670-f2c0-4c13-8ac3-5c6a66e27c81 Info: {"upstream_id":7365094296724957012,"namespace":"default","id":"83e3f670-f2c0-4c13-8ac3-5c6a66e27c81","sink_uri":"kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-26828?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:42.618940154+08:00","start_ts":449529684758364161,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529684758364161,"checkpoint_ts":449529684758364161,"checkpoint_time":"2024-05-04 18:47:39.265"} [Sat May 4 18:47:42 CST 2024] <<<<<< START kafka consumer in new_ci_collation case >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
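The JSON blobs above are the replication configs echoed back by `cdc cli changefeed create`; each case passes the start-ts captured earlier plus a sink URI, and for the Kafka cases the topic, protocol, partition count, broker version and message size limit are all carried as URI query parameters. The invocation pattern, using the sink_retry values visible in the log (coverage file name simplified):

  cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.out cli changefeed create \
    --start-ts=449529684212580353 \
    --sink-uri='kafka://127.0.0.1:9092/ticdc-sink-retry-test-16344?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'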
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:43 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f4b84667-8f4b-45d4-b3c1-bb9408f75b52 {"id":"f4b84667-8f4b-45d4-b3c1-bb9408f75b52","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819660} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373827ce f4b84667-8f4b-45d4-b3c1-bb9408f75b52 /tidb/cdc/default/default/upstream/7365094297153192896 {"id":7365094297153192896,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f4b84667-8f4b-45d4-b3c1-bb9408f75b52 {"id":"f4b84667-8f4b-45d4-b3c1-bb9408f75b52","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819660} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373827ce f4b84667-8f4b-45d4-b3c1-bb9408f75b52 /tidb/cdc/default/default/upstream/7365094297153192896 {"id":7365094297153192896,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1843/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f4b84667-8f4b-45d4-b3c1-bb9408f75b52 {"id":"f4b84667-8f4b-45d4-b3c1-bb9408f75b52","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819660} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373827ce f4b84667-8f4b-45d4-b3c1-bb9408f75b52 /tidb/cdc/default/default/upstream/7365094297153192896 {"id":7365094297153192896,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: fc2c5a5f-d17d-43eb-a322-e33e7224a535 Info: {"upstream_id":7365094297153192896,"namespace":"default","id":"fc2c5a5f-d17d-43eb-a322-e33e7224a535","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-04T18:47:43.267202596+08:00","start_ts":449529684934000642,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529684934000642,"checkpoint_ts":449529684934000642,"checkpoint_time":"2024-05-04 18:47:39.935"} [Sat May 4 18:47:43 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>> [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // stage check diff failed 2-th time, retry later [Pipeline] } [Pipeline] // container VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0cde0bdc000b Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r, pid:1479, start at 2024-05-04 18:47:42.593367678 +0800 CST m=+5.466833488 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.602 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:42.583 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.583 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } [Pipeline] // withEnv [Pipeline] } + set +x [Sat May 4 18:47:43 CST 2024] <<<<<< START kafka consumer in sink_retry case >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde0c980018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf, pid:1478, start at 2024-05-04 18:47:42.657549089 +0800 CST m=+5.129058215 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.663 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:42.630 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.630 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde0c980018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf, pid:1478, start at 2024-05-04 18:47:42.657549089 +0800 CST m=+5.129058215 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.663 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-18:47:42.630 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.630 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde0e3c000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-nnjsj-fkpsf, pid:1564, start at 2024-05-04 18:47:42.7435575 +0800 CST m=+5.160219999 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.749 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:42.735 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.735 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // node + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:43 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 {"id":"2e9d00f3-ed67-48f9-897f-a96e66e6a4c1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433737d7d3 2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 /tidb/cdc/default/default/upstream/7365094294205294177 {"id":7365094294205294177,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 {"id":"2e9d00f3-ed67-48f9-897f-a96e66e6a4c1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433737d7d3 2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 /tidb/cdc/default/default/upstream/7365094294205294177 {"id":7365094294205294177,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** 
owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 {"id":"2e9d00f3-ed67-48f9-897f-a96e66e6a4c1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433737d7d3 2e9d00f3-ed67-48f9-897f-a96e66e6a4c1 /tidb/cdc/default/default/upstream/7365094294205294177 {"id":7365094294205294177,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } Create changefeed successfully! ID: 7458c558-6a8b-4477-b23d-506013b83094 Info: {"upstream_id":7365094294205294177,"namespace":"default","id":"7458c558-6a8b-4477-b23d-506013b83094","sink_uri":"kafka://127.0.0.1:9092/ticdc-tiflash-test-18811?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:44.061665522+08:00","start_ts":449529684729266179,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529684729266179,"checkpoint_ts":449529684729266179,"checkpoint_time":"2024-05-04 18:47:39.154"} [Sat May 4 18:47:44 CST 2024] <<<<<< START kafka consumer in tiflash case >>>>>> [Pipeline] // stage [Pipeline] } table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:44 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5bbf16b9-34b1-4198-8593-c56759924a6c {"id":"5bbf16b9-34b1-4198-8593-c56759924a6c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373d0dd1 5bbf16b9-34b1-4198-8593-c56759924a6c /tidb/cdc/default/default/upstream/7365094294179641742 {"id":7365094294179641742,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5bbf16b9-34b1-4198-8593-c56759924a6c {"id":"5bbf16b9-34b1-4198-8593-c56759924a6c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373d0dd1 5bbf16b9-34b1-4198-8593-c56759924a6c /tidb/cdc/default/default/upstream/7365094294179641742 {"id":7365094294179641742,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/5bbf16b9-34b1-4198-8593-c56759924a6c {"id":"5bbf16b9-34b1-4198-8593-c56759924a6c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819661} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43373d0dd1 5bbf16b9-34b1-4198-8593-c56759924a6c /tidb/cdc/default/default/upstream/7365094294179641742 {"id":7365094294179641742,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 18:47:44 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>> 0 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde19a40019 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100, pid:1374, start at 2024-05-04 18:47:43.497179202 +0800 CST m=+5.155777382 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:43.503 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:43.465 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:43.465 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde19a40019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100, pid:1374, start at 2024-05-04 18:47:43.497179202 +0800 CST m=+5.155777382 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:43.503 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:43.465 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:43.465 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde1b140015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-zthb7-4n100, pid:1459, start at 2024-05-04 18:47:43.604953553 +0800 CST m=+5.208392142 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:43.611 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:43.608 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:43.608 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 1-th time, retry later [Sat May 4 18:47:44 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29582960.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde0bdc000b Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r, pid:1479, start at 2024-05-04 18:47:42.593367678 +0800 CST m=+5.466833488 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.602 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:42.583 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.583 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde07200015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-xgxq5-f3x9r, pid:1571, start at 2024-05-04 18:47:42.31969432 +0800 CST m=+5.136092697 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:42.325 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:42.280 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:42.280 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
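Editor's note: the repeated "ERROR 2003 (HY000): Can't connect to MySQL server" lines elsewhere in this log are produced while the harness polls the TiDB port until it accepts connections. A minimal sketch of such a wait loop is shown below; the attempt limit and sleep interval are assumptions for illustration, not the script's actual values.

# Sketch: poll a TiDB endpoint until `SELECT 1` succeeds or we give up (values assumed).
for i in $(seq 1 60); do
    if mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; then
        echo "TiDB is up after ${i} attempts"
        break
    fi
    echo "TiDB not ready yet (attempt ${i}), retrying..."
    sleep 1
done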
Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2903.out cli tso query --pd=http://127.0.0.1:2379 table cdc_tiflash_test.multi_data_type not exists for 2-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2911.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:47 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fa4548a5-1c95-4e42-b7ec-cf685b879830 {"id":"fa4548a5-1c95-4e42-b7ec-cf685b879830","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433735327f fa4548a5-1c95-4e42-b7ec-cf685b879830 /tidb/cdc/default/default/upstream/7365094285229036933 {"id":7365094285229036933,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fa4548a5-1c95-4e42-b7ec-cf685b879830 {"id":"fa4548a5-1c95-4e42-b7ec-cf685b879830","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433735327f fa4548a5-1c95-4e42-b7ec-cf685b879830 /tidb/cdc/default/default/upstream/7365094285229036933 {"id":7365094285229036933,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/fa4548a5-1c95-4e42-b7ec-cf685b879830 {"id":"fa4548a5-1c95-4e42-b7ec-cf685b879830","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f433735327f fa4548a5-1c95-4e42-b7ec-cf685b879830 /tidb/cdc/default/default/upstream/7365094285229036933 {"id":7365094285229036933,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x check diff failed 2-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2913.out cli tso query --pd=http://127.0.0.1:2379 [Sat May 4 18:47:47 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>> check_changefeed_state http://127.0.0.1:2379 b687ec0e-b681-4184-bf1b-f1952d61b43a normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=b687ec0e-b681-4184-bf1b-f1952d61b43a + expected_state=normal + error_msg=null + tls_dir=null + [[ 
http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c b687ec0e-b681-4184-bf1b-f1952d61b43a -s + info='{ "upstream_id": 7365094285229036933, "namespace": "default", "id": "b687ec0e-b681-4184-bf1b-f1952d61b43a", "state": "normal", "checkpoint_tso": 449529686846341122, "checkpoint_time": "2024-05-04 18:47:47.230", "error": null }' + echo '{ "upstream_id": 7365094285229036933, "namespace": "default", "id": "b687ec0e-b681-4184-bf1b-f1952d61b43a", "state": "normal", "checkpoint_tso": 449529686846341122, "checkpoint_time": "2024-05-04 18:47:47.230", "error": null }' { "upstream_id": 7365094285229036933, "namespace": "default", "id": "b687ec0e-b681-4184-bf1b-f1952d61b43a", "state": "normal", "checkpoint_tso": 449529686846341122, "checkpoint_time": "2024-05-04 18:47:47.230", "error": null } ++ echo '{' '"upstream_id":' 7365094285229036933, '"namespace":' '"default",' '"id":' '"b687ec0e-b681-4184-bf1b-f1952d61b43a",' '"state":' '"normal",' '"checkpoint_tso":' 449529686846341122, '"checkpoint_time":' '"2024-05-04' '18:47:47.230",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365094285229036933, '"namespace":' '"default",' '"id":' '"b687ec0e-b681-4184-bf1b-f1952d61b43a",' '"state":' '"normal",' '"checkpoint_tso":' 449529686846341122, '"checkpoint_time":' '"2024-05-04' '18:47:47.230",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449529686571352065 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529686571352065 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:47:47 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.29472949.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 1-th time, retry later + set +x + tso='449529686789980161 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529686789980161 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 18:47:48 CST 2024] <<<<<< START cdc server in resolve_lock case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.29442946.out server --log-file /tmp/tidb_cdc_test/resolve_lock/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resolve_lock/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table cdc_tiflash_test.multi_data_type exists check diff successfully table new_ci_collation_test.t1 exists table new_ci_collation_test.t2 exists table new_ci_collation_test.t3 exists table new_ci_collation_test.t4 not exists for 1-th check, retry later wait process cdc.test exit for 1-th time... check diff successfully check_safepoint_forward http://127.0.0.1:2379 7365094294179641742 449529687018045441 449529686100279298 + set +x + tso='449529686991306753 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529686991306753 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 18:47:49 CST 2024] <<<<<< START cdc server in default_value case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.29502952.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh: line 1: 2983 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false&multi-stmt-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] } [Sat May 4 18:47:50 CST 2024] <<<<<< run test case tiflash success! 
>>>>>> [Pipeline] // dir run task successfully Post stage [Pipeline] sh ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_state http://127.0.0.1:2379 ea4d934b-0b76-4161-8e6a-0c4d8a31d074 stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=ea4d934b-0b76-4161-8e6a-0c4d8a31d074 + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c ea4d934b-0b76-4161-8e6a-0c4d8a31d074 -s + info='{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' + echo '{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' { "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null } ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"stopped",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"stopped",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365094294179641742 check diff failed 2-th time, retry later + ls /tmp/tidb_cdc_test/ cov.open_protocol_handle_key_only.cli.2856.out cov.open_protocol_handle_key_only.cli.2952.out open_protocol_handle_key_only sql_res.open_protocol_handle_key_only.txt ++ find /tmp/tidb_cdc_test/ -type f -name '*.log' + tar -cvzf log-G01.tar.gz /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/stdout.log tar: Removing leading `/' from member names /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_other.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_kafka_consumer.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy/db/000005.log 
/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv1.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/region-meta/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1/hot-region/000001.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv_down.log table new_ci_collation_test.t4 exists table new_ci_collation_test.t5 not exists for 1-th check, retry later /tmp/tidb_cdc_test/open_protocol_handle_key_only/output/sync_diff.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv3.log + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:50 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d73326d1-2a02-408e-8384-9d0a0be8c70f {"id":"d73326d1-2a02-408e-8384-9d0a0be8c70f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375b7ad1 d73326d1-2a02-408e-8384-9d0a0be8c70f /tidb/cdc/default/default/upstream/7365094331683306231 {"id":7365094331683306231,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d73326d1-2a02-408e-8384-9d0a0be8c70f {"id":"d73326d1-2a02-408e-8384-9d0a0be8c70f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375b7ad1 d73326d1-2a02-408e-8384-9d0a0be8c70f /tidb/cdc/default/default/upstream/7365094331683306231 {"id":7365094331683306231,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d73326d1-2a02-408e-8384-9d0a0be8c70f 
{"id":"d73326d1-2a02-408e-8384-9d0a0be8c70f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375b7ad1 d73326d1-2a02-408e-8384-9d0a0be8c70f /tidb/cdc/default/default/upstream/7365094331683306231 {"id":7365094331683306231,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2999.out cli changefeed create --start-ts=449529686571352065 '--sink-uri=kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-21258?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 0a0d95ff-60fc-41b6-9854-0fe1ad430b7a Info: {"upstream_id":7365094331683306231,"namespace":"default","id":"0a0d95ff-60fc-41b6-9854-0fe1ad430b7a","sink_uri":"kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-21258?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:51.215314049+08:00","start_ts":449529686571352065,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529686571352065,"checkpoint_ts":449529686571352065,"checkpoint_time":"2024-05-04 18:47:46.181"} PASS table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 not exists for 1-th check, retry later /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb_down.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tidb-slow.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/tikv2/db/000005.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/pd1.log 
/tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/sync_diff_inspector.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/down_pd.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/open_protocol_handle_key_only/stdout.log + ls -alh log-G01.tar.gz -rw-r--r--. 1 jenkins jenkins 3.2M May 4 18:47 log-G01.tar.gz [Pipeline] archiveArtifacts Archiving artifacts coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:51 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4b8f75b2-3505-4029-9675-04f1f0f37cd4 {"id":"4b8f75b2-3505-4029-9675-04f1f0f37cd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375a71d2 4b8f75b2-3505-4029-9675-04f1f0f37cd4 /tidb/cdc/default/default/upstream/7365094329879965053 {"id":7365094329879965053,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: Recording fingerprints *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4b8f75b2-3505-4029-9675-04f1f0f37cd4 {"id":"4b8f75b2-3505-4029-9675-04f1f0f37cd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375a71d2 4b8f75b2-3505-4029-9675-04f1f0f37cd4 /tidb/cdc/default/default/upstream/7365094329879965053 {"id":7365094329879965053,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/4b8f75b2-3505-4029-9675-04f1f0f37cd4 {"id":"4b8f75b2-3505-4029-9675-04f1f0f37cd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819668} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375a71d2 4b8f75b2-3505-4029-9675-04f1f0f37cd4 /tidb/cdc/default/default/upstream/7365094329879965053 {"id":7365094329879965053,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.3003.out cli changefeed create --start-ts=449529686789980161 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-17723?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } Create changefeed successfully! ID: b6fe5473-b791-4e36-9102-1f76f5bd6c4e Info: {"upstream_id":7365094329879965053,"namespace":"default","id":"b6fe5473-b791-4e36-9102-1f76f5bd6c4e","sink_uri":"kafka://127.0.0.1:9092/ticdc-resolve-lock-test-17723?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:52.087279398+08:00","start_ts":449529686789980161,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529686789980161,"checkpoint_ts":449529686789980161,"checkpoint_time":"2024-05-04 18:47:47.015"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. 
Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde90d80016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:4147, start at 2024-05-04 18:47:51.128968763 +0800 CST m=+5.291519050 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:51.136 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:51.094 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:51.094 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 10:47:52 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da3fbf7d-b638-43bf-9b70-bf2b6256742b {"id":"da3fbf7d-b638-43bf-9b70-bf2b6256742b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819669} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375bb1d2 da3fbf7d-b638-43bf-9b70-bf2b6256742b /tidb/cdc/default/default/upstream/7365094333931416997 {"id":7365094333931416997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da3fbf7d-b638-43bf-9b70-bf2b6256742b {"id":"da3fbf7d-b638-43bf-9b70-bf2b6256742b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819669} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375bb1d2 da3fbf7d-b638-43bf-9b70-bf2b6256742b /tidb/cdc/default/default/upstream/7365094333931416997 {"id":7365094333931416997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + 
grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da3fbf7d-b638-43bf-9b70-bf2b6256742b {"id":"da3fbf7d-b638-43bf-9b70-bf2b6256742b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-86-g5bf93c6ca","git-hash":"5bf93c6caedff315c4c9650d80e951e31bc88a3d","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714819669} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f43375bb1d2 da3fbf7d-b638-43bf-9b70-bf2b6256742b /tidb/cdc/default/default/upstream/7365094333931416997 {"id":7365094333931416997,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.3015.out cli changefeed create --start-ts=449529686991306753 '--sink-uri=kafka://127.0.0.1:9092/ticdc-default-value-test-8218?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [Pipeline] // withEnv [Pipeline] } table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later [Pipeline] // stage + set +x [Sat May 4 18:47:52 CST 2024] <<<<<< START kafka consumer in many_pk_or_uk case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G01' Create changefeed successfully! 
ID: 07cd154e-7b26-4658-95b7-bf856fa156df Info: {"upstream_id":7365094333931416997,"namespace":"default","id":"07cd154e-7b26-4658-95b7-bf856fa156df","sink_uri":"kafka://127.0.0.1:9092/ticdc-default-value-test-8218?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T18:47:52.822048517+08:00","start_ts":449529686991306753,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-86-g5bf93c6ca","resolved_ts":449529686991306753,"checkpoint_ts":449529686991306753,"checkpoint_time":"2024-05-04 18:47:47.783"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
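Editor's note: the traces above show the recurring pattern for starting a changefeed in these cases: query a start TSO from PD, strip the "PASS coverage" lines appended by the coverage-instrumented binary, then create a Kafka changefeed with that TSO. A condensed sketch of the sequence follows, using the plain cdc binary rather than the cdc.test wrapper the harness actually runs; the topic name and PD address are copied from the log and error handling is omitted.

# Sketch of the tso-query + changefeed-create sequence traced above.
start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
cdc cli changefeed create \
    --start-ts="${start_ts}" \
    --sink-uri="kafka://127.0.0.1:9092/ticdc-default-value-test-8218?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"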
Sending interrupt signal to process Killing processes table new_ci_collation_test.t5 exists check diff failed 1-th time, retry later go: downloading golang.org/x/net v0.24.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading golang.org/x/text v0.14.0 + set +x [Sat May 4 18:47:53 CST 2024] <<<<<< START kafka consumer in resolve_lock case >>>>>> go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/google/btree v1.1.2 go: downloading golang.org/x/sync v0.7.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda run task successfully check diff successfully wait process cdc.test exit for 1-th time... 
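Editor's note: the "+ curl -vsL --max-time 20 ..." blocks that recur throughout this log are the cdc server readiness probe. The harness hits /debug/info with basic auth up to 50 times, treating "etcd info" in the response as success and "failed to get info:" as a retryable error. A standalone sketch of that loop follows, simplified from the (( i <= 50 )) arithmetic loop in the traces.

# Sketch of the /debug/info readiness probe traced repeatedly in this log.
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
    if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        break
    fi
    echo "$res" | grep -q 'failed to get info:' && echo "cdc server reported an error, keep waiting"
    [ "$i" -eq 50 ] && { echo "cdc server failed to start in time"; exit 1; }
    sleep 3
done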
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sys v0.19.0 check_changefeed_state http://127.0.0.1:2379 ea4d934b-0b76-4161-8e6a-0c4d8a31d074 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=ea4d934b-0b76-4161-8e6a-0c4d8a31d074 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c ea4d934b-0b76-4161-8e6a-0c4d8a31d074 -s VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde90d80016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:4147, start at 2024-05-04 18:47:51.128968763 +0800 CST m=+5.291519050 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:51.136 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:51.094 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:51.094 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0cde919c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1843-rfvt7-rz8h5, pid:4229, start at 2024-05-04 18:47:51.189195794 +0800 CST m=+5.294559476 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-18:49:51.197 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-18:47:51.194 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-18:37:51.194 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 + set +x [Sat May 4 18:47:54 CST 2024] <<<<<< START kafka consumer in default_value case >>>>>> go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading golang.org/x/time v0.5.0 go: downloading golang.org/x/sync v0.7.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 + info='{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "normal", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' + echo '{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "normal", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' { "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "normal", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null } ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"normal",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"normal",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7365094294179641742 449529687542071298 449529687542071299 wait process cdc.test exit for 2-th time... go: downloading go.uber.org/atomic v1.11.0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 table test.finish_mark not exists for 2-th check, retry later script returned exit code 143 Starting Upstream TiFlash... 
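Editor's note: earlier in this log, the post stage of the failed G01 branch collected every *.log file under /tmp/tidb_cdc_test/ and archived them as log-G01.tar.gz before archiveArtifacts ran. A minimal sketch of that collection step follows; the variable name is illustrative.

# Sketch of the post-stage log collection seen in the G01 branch above.
logs=$(find /tmp/tidb_cdc_test/ -type f -name '*.log')
tar -cvzf log-G01.tar.gz $logs   # word splitting is intentional; tar warns about stripping the leading '/'
ls -alh log-G01.tar.gz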
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/net v0.24.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:47:54 CST 2024] <<<<<< run test case processor_err_chan success! 
>>>>>> go: downloading golang.org/x/text v0.14.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading golang.org/x/tools v0.20.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading google.golang.org/api v0.170.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading 
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/otiai10/copy v1.2.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c run task successfully check diff failed 2-th time, retry later go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/dustin/go-humanize v1.0.1 go: 
downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da check_changefeed_state http://127.0.0.1:2379 ea4d934b-0b76-4161-8e6a-0c4d8a31d074 stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=ea4d934b-0b76-4161-8e6a-0c4d8a31d074 + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c ea4d934b-0b76-4161-8e6a-0c4d8a31d074 -s go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 + info='{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' + echo '{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null }' { "upstream_id": 7365094294179641742, "namespace": "default", "id": "ea4d934b-0b76-4161-8e6a-0c4d8a31d074", "state": "stopped", "checkpoint_tso": 449529687542071299, "checkpoint_time": "2024-05-04 18:47:49.884", "error": null } ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"stopped",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"ea4d934b-0b76-4161-8e6a-0c4d8a31d074",' '"state":' '"stopped",' '"checkpoint_tso":' 449529687542071299, '"checkpoint_time":' '"2024-05-04' '18:47:49.884",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_changefeed_state http://127.0.0.1:2379 87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c -s go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading golang.org/x/tools v0.20.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/spf13/cobra v1.8.0 go: downloading 
cloud.google.com/go v0.112.2 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading k8s.io/api v0.28.6 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 + info='{ "upstream_id": 7365094294179641742, "namespace": "default", "id": "87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c", "state": "normal", "checkpoint_tso": 449529689114935297, "checkpoint_time": "2024-05-04 18:47:55.884", "error": null }' + echo '{ 
"upstream_id": 7365094294179641742, "namespace": "default", "id": "87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c", "state": "normal", "checkpoint_tso": 449529689114935297, "checkpoint_time": "2024-05-04 18:47:55.884", "error": null }' { "upstream_id": 7365094294179641742, "namespace": "default", "id": "87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c", "state": "normal", "checkpoint_tso": 449529689114935297, "checkpoint_time": "2024-05-04 18:47:55.884", "error": null } ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c",' '"state":' '"normal",' '"checkpoint_tso":' 449529689114935297, '"checkpoint_time":' '"2024-05-04' '18:47:55.884",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365094294179641742, '"namespace":' '"default",' '"id":' '"87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c",' '"state":' '"normal",' '"checkpoint_tso":' 449529689114935297, '"checkpoint_time":' '"2024-05-04' '18:47:55.884",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365094294179641742 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/coocood/freecache v1.2.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/spf13/pflag v1.0.5 go: downloading 
github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading cloud.google.com/go v0.112.2 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/spkg/bom v1.0.0 go: downloading 
github.com/xitongsys/parquet-go v1.6.0 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 table test.finish_mark not exists for 3-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5632.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 3-th time, retry later go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 kill finished with exit code 0 Sending interrupt signal to process Killing processes + set +x + tso='449529689445236740 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449529689445236740 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
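The tso query traced here returns more than just the timestamp: the coverage-instrumented cdc binary appends "PASS coverage: ..." to its output, which is why the awk '{print $1}' on the next line keeps only the first field. A small sketch of that extraction, following the traced commands (variable names are illustrative):

# query a start-ts from PD; the instrumented binary prints "<tso> PASS coverage: ...",
# so keep only the first whitespace-separated field as the timestamp
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')
echo "using start-ts ${start_ts}"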
+ awk -F ' ' '{print $1}' + set +x script returned exit code 143 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 table test.finish_mark not exists for 4-th check, retry later go: downloading github.com/jmespath/go-jmespath v0.4.0 check diff failed 4-th time, retry later run task successfully kill finished with exit code 0 Sending interrupt signal to process Killing processes Changefeed remove successfully. ID: ea4d934b-0b76-4161-8e6a-0c4d8a31d074 CheckpointTs: 449529687542071299 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-16440?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_forward http://127.0.0.1:2379 7365094294179641742 449529687542071298 449529689114935297 449529687542071299 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 {"level":"warn","ts":1714819680.1524463,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00212fc00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 kill finished with exit code 0 Sending interrupt signal to process Killing processes run task successfully kill finished with exit code 0 Sending interrupt signal to process Killing processes kill finished with exit code 0 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714819680.9663785,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00232ee00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 script returned exit code 143 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd [2024/05/04 18:48:01.502 +08:00] [ERROR] [request.go:310] ["failed to send a http request"] [error="Get 
\"http://127.0.0.1:8300/api/v2/changefeeds/87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c?namespace=default\": context canceled"] Changefeed remove failed. ID: 87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c Error: Get "http://127.0.0.1:8300/api/v2/changefeeds/87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c?namespace=default": context canceled Error: Get "http://127.0.0.1:8300/api/v2/changefeeds/87cf0816-a0f8-40cf-bb1a-9cc43a4b2f5c?namespace=default": context canceled script returned exit code 143 check diff failed 5-th time, retry later kill finished with exit code 0 Sending interrupt signal to process Killing processes {"level":"warn","ts":1714819681.8287792,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001fbda40/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 script returned exit code 143 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/region_merge/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff successfully check diff failed 1-th time, retry later kill finished with exit code 0 Sending interrupt signal to process Killing processes start tidb cluster in /tmp/tidb_cdc_test/region_merge Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff successfully wait process cdc.test exit for 1-th time... kill finished with exit code 0 Sending interrupt signal to process Killing processes script returned exit code 143 {"level":"warn","ts":1714819686.6428578,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0022a9880/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"} script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes wait process cdc.test exit for 2-th time... script returned exit code 143 kill finished with exit code 0 Sending interrupt signal to process Killing processes cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 18:48:07 CST 2024] <<<<<< run test case new_ci_collation success! 
>>>>>> script returned exit code 143 kill finished with exit code 0 script returned exit code 143 [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] } Cache not saved (inner-step execution failed) [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] // cache [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] // dir [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] // withCredentials [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] 
// container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] // node [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] // podTemplate [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] // withEnv [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G00' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G02' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G04' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G05' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G06' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G07' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G08' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G09' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G10' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G11' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G12' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G13' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G14' [Pipeline] } Failed in branch Matrix - TEST_GROUP = 'G15' [Pipeline] // parallel [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] 
// container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] End of Pipeline ERROR: script returned exit code 1 Finished: FAILURE