Started by user Jenkins Admin Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git Loading library tipipeline@main Library tipipeline@main is cached. Copying from home. [Pipeline] Start of Pipeline [Pipeline] readJSON [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-2grj7-2cs3x --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "7968dd17390e29a73f34712ef02bc2f441186117" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-2grj7" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: 
"/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] stage [Pipeline] { (Declarative: Checkout SCM) [Pipeline] checkout The recommended git tool is: git No credentials specified Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git 
+refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] } [Pipeline] // stage [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] timeout Timeout set to expire in 1 hr 5 min [Pipeline] { [Pipeline] stage [Pipeline] { (Debug info) [Pipeline] sh + printenv PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=2deac3b5-c16e-4118-8390-a72b97e9008d BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Debug info BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 JENKINS_URL=https://do.pingcap.net/jenkins/ 
BUILD_ID=1786675747499282435 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-2grj7 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305 NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 pingcap_tiflow_pull_cdc_integration_kafka_test_1836-2grj7 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build1398336748=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- ------------------------- + echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 bash' debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1836-2grj7-xc4w2 bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 48816 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: e056c7b9cb0b3ebb (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 
19 IN A 20.205.243.166 ;; Query time: 0 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Sat May 04 08:35:04 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Check diff files) [Pipeline] container [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $token [Pipeline] { [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] httpRequest Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure. Affected argument(s) used the following variable(s): [token] See https://jenkins.io/redirect/groovy-string-interpolation for details. HttpMethod: GET URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Content-Type: application/json Authorization: ***** Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100 Response Code: HTTP/1.1 200 OK Success: Status code 200 is in the accepted range: 100:399 [Pipeline] } [Pipeline] // withCredentials [Pipeline] echo pr_diff_files: [cdc/model/kv.go, cdc/model/sink.go, cdc/model/sink_test.go, cdc/processor/processor.go, cdc/processor/sinkmanager/manager.go, cdc/processor/sourcemanager/manager.go, cdc/redo/reader/reader.go, cdc/sink/dmlsink/factory/factory.go, cdc/sink/dmlsink/txn/mysql/dml.go, cdc/sink/dmlsink/txn/mysql/mysql.go, cdc/sink/dmlsink/txn/mysql/mysql_test.go, cmd/kafka-consumer/main.go, cmd/pulsar-consumer/main.go, cmd/storage-consumer/main.go, errors.toml, pkg/applier/redo.go, pkg/applier/redo_test.go, pkg/errors/cdc_errors.go, pkg/errors/helper.go, tests/integration_tests/_utils/check_sync_diff, tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml, tests/integration_tests/changefeed_dup_error_restart/conf/workload, tests/integration_tests/changefeed_dup_error_restart/run.sh, tests/integration_tests/force_replicate_table/run.sh, tests/integration_tests/open_protocol_handle_key_only/data/data.sql, tests/integration_tests/open_protocol_handle_key_only/run.sh, tests/integration_tests/run_group.sh] [Pipeline] echo diff file not matched: cdc/model/kv.go [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] timeout Timeout set to expire in 10 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tiflow/rev-be15534) 203635712 bytes in 1.34 secs (151542330 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.36.6 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/ .git 
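The checkout step whose output follows restores a cached clone of pingcap/tiflow, checks out the base SHA be1553484fe4c03594eabb8d7435c694e5fd7224, fetches the head of PR #10919, and pre-merges it onto the base before testing. A rough local equivalent is sketched below; it assumes a plain clone of pingcap/tiflow, while the real refspec and cache handling live in the tipipeline library.

    # Sketch only: reproduce the pre-merge shown in the log below
    git fetch origin master refs/pull/10919/head:refs/remotes/origin/pr/10919/head
    git checkout -f be1553484fe4c03594eabb8d7435c694e5fd7224   # base SHA
    git merge origin/pr/10919/head                             # fast-forwards to c950cce3a in this run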
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) POST git-upload-pack (656 bytes) POST git-upload-pack (973 bytes) From https://github.com/pingcap/tiflow = [up to date] master -> origin/master * [new ref] refs/pull/10919/head -> origin/pr/10919/head HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) 🚧 Checkouting to base SHA:be1553484fe4c03594eabb8d7435c694e5fd7224... HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010) ✅ Checked. 🎉 🧾 HEAD info: be1553484fe4c03594eabb8d7435c694e5fd7224 be1553484 codec(ticdc): avro simplify the unit test (#11010) 2a7a65c6f Support Sequences (#10203) 36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005) 🚧 Pre-merge heads of pull requests to base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ... Updating be1553484..c950cce3a Fast-forward cdc/model/kv.go | 5 + cdc/model/sink.go | 38 ++- cdc/model/sink_test.go | 9 +- cdc/processor/processor.go | 21 +- cdc/processor/sinkmanager/manager.go | 5 + cdc/processor/sourcemanager/manager.go | 66 +++- cdc/redo/reader/reader.go | 21 +- cdc/sink/dmlsink/factory/factory.go | 8 +- cdc/sink/dmlsink/txn/mysql/dml.go | 7 + cdc/sink/dmlsink/txn/mysql/mysql.go | 87 +++--- cdc/sink/dmlsink/txn/mysql/mysql_test.go | 2 +- cmd/kafka-consumer/main.go | 4 +- cmd/pulsar-consumer/main.go | 17 +- cmd/storage-consumer/main.go | 4 +- errors.toml | 5 + pkg/applier/redo.go | 303 +++++++++++++++++- pkg/applier/redo_test.go | 347 ++++++++++++++++++++- pkg/errors/cdc_errors.go | 4 + pkg/errors/helper.go | 19 ++ tests/integration_tests/_utils/check_sync_diff | 2 +- .../conf/diff_config.toml | 29 ++ .../changefeed_dup_error_restart/conf/workload | 13 + .../changefeed_dup_error_restart/run.sh | 54 ++++ .../integration_tests/force_replicate_table/run.sh | 4 +- .../open_protocol_handle_key_only/data/data.sql | 2 +- .../open_protocol_handle_key_only/run.sh | 2 + tests/integration_tests/run_group.sh | 5 +- 27 files changed, 980 insertions(+), 103 deletions(-) create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/workload create mode 100755 tests/integration_tests/changefeed_dup_error_restart/run.sh 🧾 Pre-merged result: c950cce3a9b105fd95bb2c788e1ab69ec32e0668 c950cce3a f 4d7b2cab4 f b673ef40a fix ✅ Pre merged 🎉 ✅ ~~~~~All done.~~~~~~ [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // retry [Pipeline] } Cache not saved (git/pingcap/tiflow/rev-be15534-c950cce already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (prepare) [Pipeline] timeout Timeout set to expire in 20 min [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] sh + cd ../tiflow + ./scripts/download-integration-test-binaries.sh master Download binaries...
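scripts/download-integration-test-binaries.sh pulls prebuilt tidb-server, pd-server, tikv-server, tiflash and assorted tooling tarballs from fileserver.pingcap.net and unpacks them into ./bin, as the output below shows. A minimal hand-rolled fetch of a single component is sketched here; the commit hash is the one this run happened to resolve, and the tarball layout is an assumption, not a guarantee.

    # Sketch only: pull one prebuilt component the way the download script does
    commit=600b2ed4bf0aa38224a1c4c4c68831820735515c   # tidb commit resolved in this run
    url="http://fileserver.pingcap.net/download/builds/pingcap/tidb/${commit}/centos7/tidb-server.tar.gz"
    mkdir -p tmp bin
    wget -nv "$url" -O tmp/tidb-server.tar.gz
    tar -xzf tmp/tidb-server.tar.gz -C bin            # assumed: the binary lands in ./bin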
% Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1102 0 --:--:-- --:--:-- --:--:-- 1138 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 788 0 --:--:-- --:--:-- --:--:-- 803 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 1925 0 --:--:-- --:--:-- --:--:-- 1952 % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 41 100 41 0 0 2315 0 --:--:-- --:--:-- --:--:-- 2411 >>> download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz 2024-05-04 16:35:23 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz [536570515/536570515] -> "tmp/tidb-server.tar.gz" [1] >>> download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz 2024-05-04 16:35:34 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1] >>> download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz 2024-05-04 16:35:50 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1] >>> download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz 2024-05-04 16:36:06 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz [456057803/456057803] -> "tmp/tiflash.tar.gz" [1] >>> download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz 2024-05-04 16:36:11 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1] >>> download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb 2024-05-04 16:36:13 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1] >>> download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 2024-05-04 16:36:13 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1] >>> download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz 2024-05-04 16:36:13 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1] >>> download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 2024-05-04 16:36:15 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz 
[79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1] >>> download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz 2024-05-04 16:36:20 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1] Download SUCCESS + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 16:36 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 16:36 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 16:36 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 16:36 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 
1 jenkins jenkins 2.0M Apr 30 16:11 xprog + make check_third_party_binary /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start + cd - /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download + mkdir -p bin + mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/ + ls -alh ./bin total 1.9G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 16:36 . drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 16:36 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 19 2023 bin drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 etc -rwxr-xr-x. 1 jenkins jenkins 17M Apr 2 2020 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 16:36 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 16:36 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 10 2023 lib lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K Nov 7 01:00 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K Nov 7 01:00 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 1 jenkins jenkins 15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M Apr 30 10:34 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M Apr 30 11:16 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M Jul 29 2020 minio -rwxr-xr-x. 1 jenkins jenkins 37M Apr 30 16:11 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M Apr 30 16:10 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M Apr 30 16:10 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:10 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M Apr 30 16:10 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M Apr 30 16:10 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M Apr 30 16:11 pd-ut -rwxr-xr-x. 
1 jenkins jenkins 32M Apr 30 16:10 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 10 2023 share -rwxr-xr-x. 1 jenkins jenkins 32M Apr 30 16:11 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M Sep 22 2023 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 1 10:57 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M Apr 30 11:15 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M Apr 30 11:29 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M Apr 30 16:11 xprog + ./bin/tidb-server -V Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + ./bin/pd-server -V Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 + ./bin/tikv-server -V TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + ./bin/tiflash --version TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored + ./bin/sync_diff_inspector --version App Name: sync_diff_inspector v2.0 Release Version: v7.4.0 Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c Git Branch: heads/refs/tags/v7.4.0 UTC Build Time: 2023-09-22 03:51:56 Go Version: go1.21.1 [Pipeline] } [Pipeline] // retry [Pipeline] } [Pipeline] // dir [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + ls -alh ./bin total 8.0K drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 16:36 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 16:36 .. 
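The prepare stage only rebuilds the TiCDC test binaries that are missing: the shell trace that follows is a chain of '[' -f ... ']' checks, each followed by the corresponding make target (make cdc, make kafka_consumer, make storage_consumer, make integration_test_build), with the version and GitHash ldflags injected by the Makefile. Condensed into a sketch of the same effect:

    # Sketch only: build the binaries the integration tests need, skipping ones already present
    cd tiflow
    [ -f ./bin/cdc ]                  || make cdc
    [ -f ./bin/cdc_kafka_consumer ]   || make kafka_consumer
    [ -f ./bin/cdc_storage_consumer ] || make storage_consumer
    [ -f ./bin/cdc.test ]             || make integration_test_build
    ./bin/cdc version                 # confirms the injected ReleaseVersion and GitHash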
+ '[' -f ./bin/cdc ']' + make cdc CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:36:24" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc ./cmd/cdc go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/DATA-DOG/go-sqlmock v1.5.0 go: downloading github.com/imdario/mergo v0.3.16 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/google/btree v1.1.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/r3labs/diff v1.1.0 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/google/uuid v1.6.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/swaggo/files v0.0.0-20210815190702-a29dd2bc99b2 go: downloading github.com/swaggo/gin-swagger v1.2.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading 
github.com/soheilhy/cmux v0.1.5 go: downloading github.com/hashicorp/golang-lru v0.5.1 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/goccy/go-json v0.10.2 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/YangKeao/seahash v0.0.0-20240229041150-e7bf269c3140 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/segmentio/kafka-go v0.4.41-0.20230526171612-f057b1d369cd go: downloading github.com/uber-go/atomic v1.4.0 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/pingcap/check v0.0.0-20211026125417-57bd13f7b5f0 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading golang.org/x/term v0.19.0 go: downloading github.com/golang/mock v1.6.0 go: downloading github.com/pingcap/tidb-dashboard v0.0.0-20240326110213-9768844ff5d7 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/swaggo/swag v1.16.3 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: 
downloading github.com/AthenZ/athenz v1.10.39 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/xdg-go/scram v1.1.2 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/docker/go-units v0.5.0 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/glebarez/sqlite v1.7.0 go: downloading gorm.io/driver/mysql v1.3.3 go: downloading github.com/joomcode/errorx v1.0.1 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/KyleBanks/depth v1.2.1 go: downloading github.com/go-openapi/spec v0.21.0 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/99designs/keyring v1.2.1 go: 
downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/aws/aws-sdk-go-v2/credentials v1.13.29 go: downloading github.com/aws/aws-sdk-go-v2/config v1.18.30 go: downloading github.com/aws/aws-sdk-go-v2/service/glue v1.58.1 go: downloading github.com/jarcoal/httpmock v1.2.0 go: downloading github.com/mailru/easyjson v0.7.7 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/xdg-go/pbkdf2 v1.0.0 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/xdg-go/stringprep v1.0.4 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/shopspring/decimal v1.3.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/glebarez/go-sqlite v1.21.2 go: downloading github.com/blacktear23/go-proxyprotocol v1.0.6 go: downloading github.com/pingcap/fn v1.0.0 go: downloading github.com/go-ozzo/ozzo-validation/v4 v4.3.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/tiancaiamao/appdash v0.0.0-20181126055449-889f96f722a2 go: downloading github.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/ardielle/ardielle-go v1.5.2 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.13.6 go: downloading github.com/aws/aws-sdk-go-v2/internal/ini v1.3.37 go: downloading github.com/aws/aws-sdk-go-v2/service/sso v1.12.14 go: downloading github.com/aws/aws-sdk-go-v2/service/ssooidc v1.14.14 go: 
downloading github.com/aws/aws-sdk-go-v2/service/sts v1.20.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/go-openapi/jsonpointer v0.21.0 go: downloading github.com/go-openapi/jsonreference v0.21.0 go: downloading github.com/go-openapi/swag v0.23.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/aws/aws-sdk-go-v2/internal/configsources v1.1.36 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading modernc.org/libc v1.37.1 go: downloading modernc.org/sqlite v1.27.0 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.9.30 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.4.30 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading k8s.io/api v0.28.6 go: downloading github.com/josharian/intern v1.0.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading 
github.com/lestrrat-go/option v1.0.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading modernc.org/memory v1.7.2 go: downloading modernc.org/mathutil v1.6.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 + '[' -f ./bin/cdc_kafka_consumer ']' + make kafka_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:23" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go + '[' -f ./bin/cdc_storage_consumer ']' + make storage_consumer CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:30" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go + '[' -f ./bin/cdc.test ']' + make integration_test_build cd tools/check && GO111MODULE=on go build -mod=mod -o ../bin/failpoint-ctl github.com/pingcap/failpoint/failpoint-ctl go: downloading github.com/pingcap/failpoint v0.0.0-20210316064728-7acb0f0a3dfd go: downloading github.com/sergi/go-diff v1.1.0 CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc_storage_consumer ./cmd/storage-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X 
"github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc_kafka_consumer ./cmd/kafka-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc_pulsar_consumer ./cmd/pulsar-consumer/main.go CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/oauth2-server ./cmd/oauth2-server/main.go go: downloading github.com/go-oauth2/oauth2/v4 v4.5.2 go: downloading github.com/tidwall/buntdb v1.3.0 go: downloading github.com/tidwall/gjson v1.14.3 go: downloading github.com/tidwall/grect v0.1.4 go: downloading github.com/tidwall/match v1.1.1 go: downloading github.com/tidwall/rtred v0.1.2 go: downloading github.com/tidwall/tinyqueue v0.1.1 go: downloading github.com/tidwall/pretty v1.2.0 $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl enable >/dev/null) go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/PingCAP-QE/go-sqlsmith v0.0.0-20231213065948-336e064b488d go: downloading github.com/chzyer/readline v1.5.1 go: downloading github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 go: downloading github.com/gogo/gateway v1.1.0 go: downloading github.com/deepmap/oapi-codegen v1.9.0 go: downloading github.com/getkin/kin-openapi v0.80.0 go: downloading github.com/shurcooL/httpgzip v0.0.0-20190720172056-320755c1c1b0 go: downloading github.com/syndtr/goleveldb v1.0.1-0.20210305035536-64b5b1c73954 go: downloading github.com/mattn/go-shellwords v1.0.12 go: downloading github.com/ngaut/log v0.0.0-20210830112240-0124ec040aeb go: downloading go.uber.org/dig v1.13.0 go: downloading github.com/VividCortex/mysqlerr v1.0.0 go: downloading go.uber.org/ratelimit v0.2.0 go: downloading go.uber.org/goleak v1.3.0 go: downloading github.com/bradleyjkemp/grpc-tools v0.2.5 go: downloading github.com/integralist/go-findroot v0.0.0-20160518114804-ac90681525dc go: downloading github.com/jmoiron/sqlx v1.3.3 go: downloading upper.io/db.v3 v3.7.1+incompatible go: downloading github.com/andres-erbsen/clock v0.0.0-20160526145045-9e14626cd129 go: downloading github.com/improbable-eng/grpc-web v0.12.0 go: downloading github.com/ghodss/yaml v1.0.0 go: downloading github.com/rs/cors v1.7.0 go: downloading github.com/desertbit/timer v0.0.0-20180107155436-c41aec40b27f CGO_ENABLED=1 GO111MODULE=on go test -p 3 --race --tags=intest -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -c -cover -covermode=atomic \ -coverpkg=github.com/pingcap/tiflow/... \ -o bin/cdc.test github.com/pingcap/tiflow/cmd/cdc \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } CGO_ENABLED=0 GO111MODULE=on go build -trimpath -ldflags '-X "github.com/pingcap/tiflow/pkg/version.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "github.com/pingcap/tiflow/pkg/version.BuildTS=2024-05-04 08:38:37" -X "github.com/pingcap/tiflow/pkg/version.GitHash=c950cce3a9b105fd95bb2c788e1ab69ec32e0668" -X "github.com/pingcap/tiflow/pkg/version.GitBranch=HEAD" -X "github.com/pingcap/tiflow/pkg/version.GoVersion=go version go1.21.0 linux/amd64" -X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-79-gc950cce3a"' -o bin/cdc ./cmd/cdc/main.go \ || { $(echo $(for p in $(go list ./... | grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null); exit 1; } $(echo $(for p in $(go list ./... 
| grep -vE 'vendor|proto|tiflow/tests|integration|testing_utils|pb|pbmock|tiflow/bin'); do echo ${p#"github.com/pingcap/tiflow/"}|grep -v "github.com/pingcap/tiflow"; done) | xargs tools/bin/failpoint-ctl disable >/dev/null) + ls -alh ./bin total 1.2G drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 16:43 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 16:36 .. -rwxr-xr-x. 1 jenkins jenkins 220M May 4 16:43 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 16:42 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 16:38 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 16:39 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 16:38 cdc_storage_consumer -rwxr-xr-x. 1 jenkins jenkins 12M May 4 16:39 oauth2-server + ./bin/cdc version Release Version: v8.2.0-alpha-79-gc950cce3a Git Commit Hash: c950cce3a9b105fd95bb2c788e1ab69ec32e0668 Git Branch: HEAD UTC Build Time: 2024-05-04 08:38:37 Go Version: go version go1.21.0 linux/amd64 Failpoint Build: true [Pipeline] } Cache saved successfully (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-c950cce) 1191672320 bytes in 19.94 secs (59757343 bytes/sec) [Pipeline] // cache [Pipeline] cache Cache not restored (no such key found) [Pipeline] { [Pipeline] sh + cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/ + ls -alh ./bin total 3.0G drwxr-sr-x. 6 jenkins jenkins 4.0K May 4 16:43 . drwxr-sr-x. 19 jenkins jenkins 4.0K May 4 16:36 .. drwxr-sr-x. 2 jenkins jenkins 4.0K May 4 16:43 bin -rwxr-xr-x. 1 jenkins jenkins 220M May 4 16:43 cdc -rwxr-xr-x. 1 jenkins jenkins 359M May 4 16:42 cdc.test -rwxr-xr-x. 1 jenkins jenkins 183M May 4 16:38 cdc_kafka_consumer -rwxr-xr-x. 1 jenkins jenkins 183M May 4 16:39 cdc_pulsar_consumer -rwxr-xr-x. 1 jenkins jenkins 182M May 4 16:38 cdc_storage_consumer drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 16:43 etc -rwxr-xr-x. 1 jenkins jenkins 17M May 4 16:43 etcdctl -rwxr-xr-x. 1 jenkins jenkins 44M May 4 16:43 go-ycsb -rwxr-xr-x. 1 jenkins jenkins 3.8M May 4 16:43 jq drwxr-sr-x. 3 jenkins jenkins 4.0K May 4 16:43 lib lrwxrwxrwx. 1 jenkins jenkins 13 May 4 16:43 libc++.so.1 -> libc++.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 1016K May 4 16:43 libc++.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 16 May 4 16:43 libc++abi.so.1 -> libc++abi.so.1.0 -rwxr-xr-x. 1 jenkins jenkins 358K May 4 16:43 libc++abi.so.1.0 lrwxrwxrwx. 1 jenkins jenkins 13 May 4 16:43 libgmssl.so -> libgmssl.so.3 lrwxrwxrwx. 
1 jenkins jenkins 15 May 4 16:43 libgmssl.so.3 -> libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 2.6M May 4 16:43 libgmssl.so.3.0 -rwxr-xr-x. 1 jenkins jenkins 272M May 4 16:43 libtiflash_proxy.so -rwxr-xr-x. 1 jenkins jenkins 50M May 4 16:43 minio -rwxr-xr-x. 1 jenkins jenkins 12M May 4 16:39 oauth2-server -rwxr-xr-x. 1 jenkins jenkins 37M May 4 16:43 pd-api-bench -rwxr-xr-x. 1 jenkins jenkins 44M May 4 16:43 pd-ctl -rwxr-xr-x. 1 jenkins jenkins 36M May 4 16:43 pd-heartbeat-bench -rwxr-xr-x. 1 jenkins jenkins 32M May 4 16:43 pd-recover -rwxr-xr-x. 1 jenkins jenkins 106M May 4 16:43 pd-server -rwxr-xr-x. 1 jenkins jenkins 26M May 4 16:43 pd-tso-bench -rwxr-xr-x. 1 jenkins jenkins 3.0M May 4 16:43 pd-ut -rwxr-xr-x. 1 jenkins jenkins 32M May 4 16:43 regions-dump drwxr-sr-x. 4 jenkins jenkins 4.0K May 4 16:43 share -rwxr-xr-x. 1 jenkins jenkins 32M May 4 16:43 stores-dump -rwxr-xr-x. 1 jenkins jenkins 192M May 4 16:43 sync_diff_inspector -rwxr-xr-x. 1 jenkins jenkins 208M May 4 16:43 tidb-server -rwxr-xr-x. 1 jenkins jenkins 380M May 4 16:43 tiflash -rwxr-xr-x. 1 jenkins jenkins 418M May 4 16:43 tikv-server -rwxr-xr-x. 1 jenkins jenkins 2.0M May 4 16:43 xprog [Pipeline] } Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 86.06 secs (43306301 bytes/sec) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Tests) [Pipeline] parallel [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16') [Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G00') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G01') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G02') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G03') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G04') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G05') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G06') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G07') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G08') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G09') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G10') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G11') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G12') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G13') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G14') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G15') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G16') [Pipeline] stage [Pipeline] { (Matrix - TEST_GROUP = 'G17') [Pipeline] withEnv [Pipeline] { 
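Note on the build steps above: each `go build -trimpath -ldflags '-X ...'` invocation stamps the release version, git hash, branch, build timestamp, and Go version into package-level string variables at link time, which is exactly what `./bin/cdc version` prints back later in the log (values containing spaces, such as the UTC build time, are why each `-X` assignment is quoted). The surrounding `failpoint-ctl enable`/`disable` calls rewrite the failpoint markers in the source tree before compiling and restore it afterwards, even when the build fails, via the `|| { ...; exit 1; }` guard, which is also why the binary reports `Failpoint Build: true`. A minimal, self-contained sketch of the linker `-X` pattern follows; the `main.ReleaseVersion` etc. names are illustrative, not the actual tiflow variable paths:

```go
// version_stamp.go: a minimal sketch of link-time version stamping.
// Build with, for example:
//   go build -trimpath -ldflags '-X "main.ReleaseVersion=v8.2.0-alpha-79-gc950cce3a" -X "main.BuildTS=2024-05-04 08:38:37"' -o demo version_stamp.go
package main

import "fmt"

// Package-level strings; the Go linker's -X flag overwrites these at link
// time, so no source change is needed per release. Without -ldflags the
// binary reports the defaults below.
var (
	ReleaseVersion = "unknown"
	GitHash        = "unknown"
	BuildTS        = "unknown"
)

func main() {
	// The same fields that `./bin/cdc version` prints in the log above.
	fmt.Printf("Release Version: %s\n", ReleaseVersion)
	fmt.Printf("Git Commit Hash: %s\n", GitHash)
	fmt.Printf("UTC Build Time: %s\n", BuildTS)
}
```

Built plainly it prints the "unknown" defaults; built with the quoted `-X` assignments it reports the stamped values, matching the behaviour seen in the `cdc version` output above.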
[Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted [Pipeline] readTrusted Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate 
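Each matrix branch provisions its own pod from pod-pull_cdc_integration_kafka_test.yaml, so every TEST_GROUP gets a private Kafka broker exposing two listeners: PLAINTEXT on 127.0.0.1:9092 (the address the canal-json adapter and Debezium connect containers point at via KAFKA_SERVER/BOOTSTRAP_SERVERS) and SSL on 127.0.0.1:9093, backed by the self-signed keystore/truststore that RACK_COMMAND downloads into /tmp. A quick reachability check of both listeners from inside the golang container can be done with the standard library alone; the sketch below is an illustrative probe, not part of the test suite, and it skips certificate verification because the test certificates are self-signed (if the broker required TLS client authentication, the handshake on 9093 would fail):

```go
// listener_probe.go: sanity-check the two Kafka listeners configured in the
// pod spec (PLAINTEXT on 9092, SSL on 9093). Illustrative only.
package main

import (
	"crypto/tls"
	"fmt"
	"net"
	"time"
)

func main() {
	// PLAINTEXT listener (see KAFKA_LISTENERS / KAFKA_ADVERTISED_LISTENERS).
	if conn, err := net.DialTimeout("tcp", "127.0.0.1:9092", 3*time.Second); err != nil {
		fmt.Println("plaintext 9092:", err)
	} else {
		conn.Close()
		fmt.Println("plaintext 9092: reachable")
	}

	// SSL listener; the broker certificate comes from the self-signed JKS
	// files fetched by RACK_COMMAND, so verification is skipped for the probe.
	dialer := &net.Dialer{Timeout: 3 * time.Second}
	conf := &tls.Config{InsecureSkipVerify: true}
	if conn, err := tls.DialWithDialer(dialer, "tcp", "127.0.0.1:9093", conf); err != nil {
		fmt.Println("ssl 9093:", err)
	} else {
		conn.Close()
		fmt.Println("ssl 9093: TLS handshake OK")
	}
}
```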
[Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp [Pipeline] node Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fgs7x-6p2vm --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "46f96d5684cf090ef4299fec4fa82416c808eeb9" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fgs7x" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" 
memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@55b04f6; decorates RemoteLauncher[hudson.remoting.Channel@53025abb:JNLP4-connect connection from 10.233.72.36/10.233.72.36:37604] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes 
jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-r5bkm-78p33 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "8495d762ba55094f49418b8233a0f16c729beb97" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-r5bkm" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: 
"/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-kjzln-bpnhs --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "a1daa73e5379c18c495e64e5adfaf07a1daa6bde" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-kjzln" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true 
volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: 
"OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3bwq1-73pmk --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "b337efdb73f72d8c7ead9d980ee4e9ff0e4a6710" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3bwq1" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: 
"SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on 
pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git [Pipeline] podTemplate [Pipeline] { [Pipeline] node Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5140f1bc; decorates RemoteLauncher[hudson.remoting.Channel@4948ebbb:JNLP4-connect connection from 10.233.105.201/10.233.105.201:40554] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@8f3555; decorates RemoteLauncher[hudson.remoting.Channel@1b20bbd9:JNLP4-connect connection from 10.233.127.103/10.233.127.103:42236] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-sj989-6gp46 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "dd867897957975b58e61b1166f0ecaa70537e403" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-sj989" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" 
memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: 
"JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout The recommended git tool is: git Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@2bea76af; decorates RemoteLauncher[hudson.remoting.Channel@16ed3ed0:JNLP4-connect connection from 10.233.69.170/10.233.69.170:44614] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tvq5x-b3x5l --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "ab54df466488fc191d943f89f73c536ec4fe9949" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tvq5x" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: 
"KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m" - name: "JENKINS_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { [Pipeline] checkout Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fzbr4-kp4z3 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0bf1c50d30286595d0a05b6d8fef5d7ce2fe4866" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fzbr4" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: 
limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test The recommended git tool is: git [Pipeline] podTemplate [Pipeline] { [Pipeline] { Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3hd5h-drjmg --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "c6b797ceac1cdfa2e29c98c99dac890dde5f7c22" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3hd5h" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: 
false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 [Pipeline] node Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] checkout The recommended git tool is: git [Pipeline] { [Pipeline] checkout The recommended git tool is: git No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@28b5f871; decorates RemoteLauncher[hudson.remoting.Channel@9905db6:JNLP4-connect connection from 10.233.93.45/10.233.93.45:55252] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-ppnsc-jb9mw --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "891d218b7fbd84148ef4edfff14abf126eb65643" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-ppnsc" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: 
false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@59bf7a24; decorates RemoteLauncher[hudson.remoting.Channel@5e6b2ea0:JNLP4-connect connection from 10.233.86.45/10.233.86.45:57150] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] withEnv [Pipeline] { Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git 
[Pipeline] checkout The recommended git tool is: git [Pipeline] podTemplate Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 [Pipeline] container [Pipeline] { [Pipeline] node [Pipeline] stage [Pipeline] { (Test) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@2f7d4aad; decorates RemoteLauncher[hudson.remoting.Channel@71e62052:JNLP4-connect connection from 10.233.90.90/10.233.90.90:50984] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3b30be9; decorates RemoteLauncher[hudson.remoting.Channel@1cb0d2d:JNLP4-connect connection from 10.233.70.248/10.233.70.248:33510] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN Cloning repository https://github.com/PingCAP-QE/ci.git [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] cache No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7291a94f; decorates RemoteLauncher[hudson.remoting.Channel@50214911:JNLP4-connect connection from 10.233.107.131/10.233.107.131:39650] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # 
timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-f9n2q-23qlf --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "9a527d9e80bcef015f060e3625f5d2d69de36406" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-f9n2q" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" 
value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git 
https://github.com/PingCAP-QE/ci.git > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-bp09q-d2lj4 --- 
apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "0a12e7ab29458cf33f48f2ca592a913ab6b7bab4" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-bp09q" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - 
mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-70rl0-hcfl3 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "d0be6f298332ccd6e93b87ccb0dea553ebe90cc8" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-70rl0" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: 
"32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: 
"JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k8pxt-f4g1x --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "e6235bc23ef0dd0c539d61841645914a7b9707d0" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k8pxt" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" 
imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tgv7p-sbplc --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: 
"http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "4f8d40022a0b5a0ddc6c7c32b6a3788e5c48cf65" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tgv7p" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: 
false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-s880q-qqxkz --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "877077e9e19b6fad474c0b7d72f41cbc48b974ac" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-s880q" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - 
mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: 
"jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 24.63 secs (151316476 bytes/sec) [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] podTemplate [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] node [Pipeline] node [Pipeline] node [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] sh [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] container [Pipeline] { [Pipeline] checkout [Pipeline] { The recommended git tool is: git [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw [Pipeline] { Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf [Pipeline] { [Pipeline] { [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout [Pipeline] stage [Pipeline] { (Test) The recommended git tool is: git [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) The recommended git tool is: git [Pipeline] } [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7363f982; decorates RemoteLauncher[hudson.remoting.Channel@74a119f3:JNLP4-connect connection from 10.233.97.229/10.233.97.229:39944] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] } [Pipeline] { [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1e2f004e; decorates RemoteLauncher[hudson.remoting.Channel@52fef50e:JNLP4-connect connection from 10.233.88.131/10.233.88.131:50846] will be ignored (a typical symptom is the Git executable not being run inside a designated container) [Pipeline] { Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { 
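Note: the `+ ...` trace above comes from the step that waits for the zookeeper and kafka sidecars before the tests start. A minimal standalone sketch of an equivalent readiness probe is below; the ports and the /brokers/ids/1 check match the pod spec and the traced commands (ZooKeeper on 2181, PLAINTEXT Kafka on 9092, KAFKA_BROKER_ID=1), while the retry loop, sleep interval, and attempt count are illustrative assumptions rather than values taken from the pipeline script.

    #!/usr/bin/env bash
    # Illustrative readiness probe mirroring the checks traced above.
    set -euo pipefail

    wait_for_port() {
      # Poll a local port with nc, as the traced script does, but with an
      # assumed retry loop (60 attempts, 2s apart) instead of a single check.
      local name=$1 port=$2
      echo "Waiting for ${name} to be ready..."
      for _ in $(seq 1 60); do
        if nc -z localhost "${port}"; then
          return 0
        fi
        sleep 2
      done
      echo "${name} did not become ready in time" >&2
      return 1
    }

    wait_for_port zookeeper 2181
    wait_for_port kafka 9092

    echo "Waiting for kafka-broker to be ready..."
    # Broker 1 registers itself under /brokers/ids/1 in ZooKeeper; the trace
    # checks this via ZooKeeper's "dump" four-letter command over nc.
    echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1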
[Pipeline] { [Pipeline] // container [Pipeline] sh No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3de603f7; decorates RemoteLauncher[hudson.remoting.Channel@4b75d85f:JNLP4-connect connection from 10.233.67.44/10.233.67.44:42190] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@27791f23; decorates RemoteLauncher[hudson.remoting.Channel@1876c0a2:JNLP4-connect connection from 10.233.100.159/10.233.100.159:60742] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@60d65e55; decorates RemoteLauncher[hudson.remoting.Channel@b30d5d6:JNLP4-connect connection from 10.233.71.30/10.233.71.30:40434] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@48154565; decorates RemoteLauncher[hudson.remoting.Channel@5eea0709:JNLP4-connect connection from 10.233.106.206/10.233.106.206:38490] will be ignored 
(a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G02 Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=92563684-50a2-47db-b696-e18d42887b07 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G02 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fgs7x GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fgs7x GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:45:52 CST 2024] <<<<<< run test case consistent_replicate_ddl success! 
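The trace above shows how the Test stage drives group G02: it recreates /tmp/tidb_cdc_test, marks run_group.sh executable, and invokes it with the sink type (kafka) and the group name, after which each case's run.sh is executed in turn (the "Running test .../run.sh using Sink-Type: kafka" banners that follow). A minimal sketch of reproducing the same invocation in a local tiflow checkout, using the commands traced here (only the cd into the checkout is assumed):

# Re-run integration-test group G02 against a Kafka sink, as this CI job does.
cd tiflow                                    # assumed: local checkout with test binaries already built
rm -rf /tmp/tidb_cdc_test
mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka G02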
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] cache Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-9z3t4-2fx7n --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: 
"4e11aec6f9997a2b2eb1cf737216b25334fb063b" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-9z3t4" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" 
value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Commit message: "fix(br): use failpoint tidb-server instead (#2951)" Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-td6bn-rvx42 --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "83384ffba14df7b9f17fa2bf6a4e7758e31c2ae6" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-td6bn" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: 
"KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf" - name: "JENKINS_NAME" value: 
"pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k7l3s-hzhsv --- apiVersion: "v1" kind: "Pod" metadata: annotations: buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/" labels: jenkins/jenkins-jenkins-agent: "true" jenkins/label-digest: "d00b8f73383b98e49a138ed5759d6f43501ceb28" jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k7l3s" name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw" namespace: "jenkins-tiflow" spec: affinity: nodeAffinity: requiredDuringSchedulingIgnoredDuringExecution: nodeSelectorTerms: - matchExpressions: - key: "kubernetes.io/arch" operator: "In" values: - "amd64" containers: - image: "wurstmeister/zookeeper" imagePullPolicy: "IfNotPresent" name: "zookeeper" resources: limits: cpu: "2000m" memory: "4Gi" requests: cpu: "2000m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - args: - "cat" image: "hub.pingcap.net/jenkins/golang-tini:1.21" imagePullPolicy: "Always" name: "golang" resources: limits: cpu: "12" memory: "32Gi" requests: cpu: "12" memory: "32Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_CREATE_TOPICS" value: "big-message-test:1:1" - name: "KAFKA_BROKER_ID" value: "1" - name: "KAFKA_SSL_KEYSTORE_PASSWORD" value: "test1234" - name: "KAFKA_ZOOKEEPER_CONNECT" value: "localhost:2181" - name: "KAFKA_MESSAGE_MAX_BYTES" value: "11534336" - name: "KAFKA_REPLICA_FETCH_MAX_BYTES" value: "11534336" - name: "KAFKA_ADVERTISED_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "ZK" value: "zk" - name: "KAFKA_SSL_KEYSTORE_LOCATION" value: "/tmp/kafka.server.keystore.jks" - name: "KAFKA_SSL_KEY_PASSWORD" value: "test1234" - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD" value: "test1234" - name: "KAFKA_LISTENERS" value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092" - name: "KAFKA_SSL_TRUSTSTORE_LOCATION" value: "/tmp/kafka.server.truststore.jks" - name: "RACK_COMMAND" value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\ \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\ \ -o /tmp/kafka.server.truststore.jks" image: "wurstmeister/kafka:2.12-2.4.1" imagePullPolicy: "IfNotPresent" name: "kafka" resources: limits: cpu: "4000m" memory: "6Gi" 
requests: cpu: "4000m" memory: "6Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "KAFKA_SERVER" value: "127.0.0.1:9092" - name: "ZOOKEEPER_SERVER" value: "127.0.0.1:2181" - name: "DOWNSTREAM_DB_HOST" value: "127.0.0.1" - name: "USE_FLAT_MESSAGE" value: "true" - name: "DOWNSTREAM_DB_PORT" value: "3306" - name: "DB_NAME" value: "test" image: "rustinliu/ticdc-canal-json-adapter:latest" imagePullPolicy: "IfNotPresent" name: "canal-adapter" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/tmp" name: "volume-0" - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/network-multitool" name: "net-tool" resources: limits: memory: "128Mi" cpu: "100m" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - image: "hub.pingcap.net/jenkins/python3-requests:latest" name: "report" resources: requests: cpu: "200m" memory: "4Gi" tty: true volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "MYSQL_ROOT_PASSWORD" value: "" - name: "MYSQL_USER" value: "mysqluser" - name: "MYSQL_PASSWORD" value: "mysqlpw" - name: "MYSQL_ALLOW_EMPTY_PASSWORD" value: "yes" - name: "MYSQL_TCP_PORT" value: "3310" image: "quay.io/debezium/example-mysql:2.4" imagePullPolicy: "IfNotPresent" name: "mysql" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "BOOTSTRAP_SERVERS" value: "127.0.0.1:9092" - name: "GROUP_ID" value: "1" - name: "CONFIG_STORAGE_TOPIC" value: "my_connect_configs" - name: "OFFSET_STORAGE_TOPIC" value: "my_connect_offsets" - name: "STATUS_STORAGE_TOPIC" value: "my_connect_statuses" image: "quay.io/debezium/connect:2.4" name: "connect" resources: requests: cpu: "200m" memory: "4Gi" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false - env: - name: "JENKINS_SECRET" value: "********" - name: "JENKINS_TUNNEL" value: "jenkins-agent.apps.svc.cluster.local:50000" - name: "JENKINS_AGENT_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw" - name: "JENKINS_NAME" value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw" - name: "JENKINS_AGENT_WORKDIR" value: "/home/jenkins/agent" - name: "JENKINS_URL" value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/" image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2" name: "jnlp" resources: requests: memory: "256Mi" cpu: "100m" volumeMounts: - mountPath: "/home/jenkins/agent" name: "workspace-volume" readOnly: false restartPolicy: "Never" securityContext: fsGroup: 1000 volumes: - emptyDir: {} name: "volume-0" - emptyDir: medium: "" name: "workspace-volume" Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # 
timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 Commit message: "fix(br): use failpoint tidb-server instead (#2951)" You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC3CC10B1F479D < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 08:45:57 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 16:45:58 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>> Exiting on signal: INTERRUPT =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:46:01 CST 2024] <<<<<< run test case consistent_replicate_nfs success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 6.92 secs (538874848 bytes/sec) [Pipeline] { [Pipeline] cache find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:46:04 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:46:07 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! 
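For the S3-backed cases (consistent_replicate_gbk above and consistent_replicate_storage_s3 below) the script brings up a local MinIO on port 24927, polls it with curl until it answers (the 403 from MinIO simply means the server is up but the anonymous request is rejected), and then creates the logbucket bucket. A sketch of that wait-and-create step; the AWS CLI and the credential values are assumptions, since the log does not show which S3 client the script actually uses:

# Poll until MinIO responds on the test port; any HTTP status (even 403) means it is listening.
while ! curl -s -o /dev/null http://127.0.0.1:24927; do
  sleep 1
done
# Hypothetical bucket creation; the access/secret key values here are placeholders.
export AWS_ACCESS_KEY_ID=minio_access_key AWS_SECRET_ACCESS_KEY=minio_secret_key
aws --endpoint-url http://127.0.0.1:24927 s3 mb s3://logbucket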
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<================= * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:24927; Connection refused * Closing connection 0 You are running an older version of MinIO released 3 years ago Update: Run `mc admin update` Attempting encryption of all config, IAM users and policies on MinIO backend Endpoint: http://127.0.0.1:24927 Object API (Amazon S3 compatible): Go: https://docs.min.io/docs/golang-client-quickstart-guide Java: https://docs.min.io/docs/java-client-quickstart-guide Python: https://docs.min.io/docs/python-client-quickstart-guide JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide .NET: https://docs.min.io/docs/dotnet-client-quickstart-guide Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 7.86 secs (473991026 bytes/sec) [Pipeline] { [Pipeline] cache * About to connect() to 127.0.0.1 port 24927 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0) > GET / HTTP/1.1 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:24927 > Accept: */* > < HTTP/1.1 403 Forbidden < Accept-Ranges: bytes < Content-Length: 226 < Content-Security-Policy: block-all-mixed-content < Content-Type: application/xml < Server: MinIO/RELEASE.2020-07-27T18-37-02Z < Vary: Origin < X-Amz-Request-Id: 17CC3CC48FFF060F < X-Xss-Protection: 1; mode=block < Date: Sat, 04 May 2024 08:46:13 GMT < { [data not shown] * Connection #0 to host 127.0.0.1 left intact Bucket 's3://logbucket/' created [Sat May 4 16:46:13 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Exiting on signal: INTERRUPT =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:46:16 CST 2024] <<<<<< run test case consistent_partition_table success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2252180013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:2119, start at 2024-05-04 16:46:32.597921387 +0800 CST m=+5.122989621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:48:32.604 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:46:32.582 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:36:32.582 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2252180013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:2119, start at 2024-05-04 16:46:32.597921387 +0800 CST m=+5.122989621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:48:32.604 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:46:32.582 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:36:32.582 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2253a00005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:2199, start at 2024-05-04 16:46:32.683481602 +0800 CST m=+5.146419645 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:48:32.689 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:46:32.680 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:36:32.680 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
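The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above are what the "Verifying Upstream/Downstream TiDB is started..." step prints once the server finally accepts connections; the preceding ERROR 2003 lines are simply the retries made while TiDB was still bootstrapping. A sketch of that kind of readiness check, assuming the stock mysql client and TiDB's default port 4000 (the exact helper the test scripts use is not shown in this log):

# Hypothetical readiness loop: retry until TiDB answers, then dump its bootstrap/GC variables.
while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
  sleep 2   # ERROR 2003 (HY000) is expected while the server is not yet listening
done
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'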
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 21.13 secs (176431482 bytes/sec) [Pipeline] { [Pipeline] cache [Sat May 4 16:46:35 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.36213623.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:46:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebaac881-fb46-479f-bac1-1bba889ff570 {"id":"ebaac881-fb46-479f-bac1-1bba889ff570","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812396} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c87092c5 ebaac881-fb46-479f-bac1-1bba889ff570 /tidb/cdc/default/default/upstream/7365063108478959905 {"id":7365063108478959905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebaac881-fb46-479f-bac1-1bba889ff570 {"id":"ebaac881-fb46-479f-bac1-1bba889ff570","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812396} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c87092c5 ebaac881-fb46-479f-bac1-1bba889ff570 /tidb/cdc/default/default/upstream/7365063108478959905 {"id":7365063108478959905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ebaac881-fb46-479f-bac1-1bba889ff570 {"id":"ebaac881-fb46-479f-bac1-1bba889ff570","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812396} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c87092c5 ebaac881-fb46-479f-bac1-1bba889ff570 /tidb/cdc/default/default/upstream/7365063108478959905 {"id":7365063108478959905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
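The shell trace above is the wait-for-cdc-server step of the kafka_big_messages_v2 case: cdc.test server is launched in the background with its coverage profile, log file, and data dir, and the script then polls http://127.0.0.1:8300/debug/info (basic auth ticdc:ticdc_secret) until the response contains 'etcd info', i.e. the capture has written its keys under /tidb/cdc in etcd. A readable reconstruction of that polling loop, using the variable names and limits from the trace (the surrounding helper-function plumbing is assumed):

# Poll the TiCDC debug endpoint until the capture shows up in etcd, as traced above.
get_info_fail_msg='failed to get info:'
etcd_info_msg='etcd info'
for ((i = 0; i <= 50; i++)); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
  if ! echo "$res" | grep -q "$get_info_fail_msg" && echo "$res" | grep -q "$etcd_info_msg"; then
    break                      # owner/capture info is visible: the server is ready
  fi
  if [ "$i" -eq 50 ]; then
    echo 'cdc server failed to come up in time'
    exit 1
  fi
  sleep 3
done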
ID: cbe3ee25-9b18-40ec-8de5-f5b0feb7373c Info: {"upstream_id":7365063108478959905,"namespace":"default","id":"cbe3ee25-9b18-40ec-8de5-f5b0feb7373c","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T16:46:39.17940316+08:00","start_ts":449527780693245953,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527780693245953,"checkpoint_ts":449527780693245953,"checkpoint_time":"2024-05-04 16:46:35.833"} [Sat May 4 16:46:39 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 6.02 secs (618850831 bytes/sec) [Pipeline] { [Pipeline] cache table kafka_big_messages.test exists check diff failed 1-th time, retry later check diff failed 2-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:46:52 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2 Starting Upstream PD... 
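The changefeed shown above is what the kafka_big_messages_v2 case replicates through: its sink URI targets the big-message-test topic with protocol=open-protocol, partition-num=1, kafka-version=2.4.1, and max-message-bytes=12582912. A sketch of creating an equivalent changefeed by hand with the cdc CLI (the test drives this through its own helpers, so the exact cli flags below are an assumption):

# Hypothetical manual equivalent of the changefeed the test harness creates.
cdc cli changefeed create \
  --server=http://127.0.0.1:8300 \
  --sink-uri='kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912'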
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 19.18 secs (194327792 bytes/sec) [Pipeline] { [Pipeline] cache Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 7.83 secs (476061525 bytes/sec) [Pipeline] { [Pipeline] cache VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b25205c0004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:6131, start at 2024-05-04 16:47:18.554912018 +0800 CST m=+5.178736904 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:49:18.561 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:47:18.551 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:37:18.551 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b25205c0004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:6131, start at 2024-05-04 16:47:18.554912018 +0800 CST m=+5.178736904 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:49:18.561 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:47:18.551 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:37:18.551 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2521100014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:6207, start at 2024-05-04 16:47:18.630881974 +0800 CST m=+5.198895747 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:49:18.637 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:47:18.596 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:37:18.596 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 16:47:21 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.76497651.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:47:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2011c52f-8c26-4ee9-8a12-6483ee6e85c3 {"id":"2011c52f-8c26-4ee9-8a12-6483ee6e85c3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812442} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c91befca 2011c52f-8c26-4ee9-8a12-6483ee6e85c3 /tidb/cdc/default/default/upstream/7365063292841021723 {"id":7365063292841021723,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2011c52f-8c26-4ee9-8a12-6483ee6e85c3 {"id":"2011c52f-8c26-4ee9-8a12-6483ee6e85c3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812442} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c91befca 2011c52f-8c26-4ee9-8a12-6483ee6e85c3 /tidb/cdc/default/default/upstream/7365063292841021723 {"id":7365063292841021723,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2011c52f-8c26-4ee9-8a12-6483ee6e85c3 {"id":"2011c52f-8c26-4ee9-8a12-6483ee6e85c3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812442} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c91befca 2011c52f-8c26-4ee9-8a12-6483ee6e85c3 /tidb/cdc/default/default/upstream/7365063292841021723 {"id":7365063292841021723,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: test-normal Info: {"upstream_id":7365063292841021723,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-30595?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:47:25.063978315+08:00","start_ts":449527792730374145,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527792730374145,"checkpoint_ts":449527792730374145,"checkpoint_time":"2024-05-04 16:47:21.751"} Create changefeed successfully! 
ID: test-error-1 Info: {"upstream_id":7365063292841021723,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-16378?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:47:25.25957814+08:00","start_ts":449527792730374145,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527792730374145,"checkpoint_ts":449527792730374145,"checkpoint_time":"2024-05-04 16:47:21.751"} Create changefeed successfully! 
ID: test-error-2 Info: {"upstream_id":7365063292841021723,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-22276?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:47:25.460013412+08:00","start_ts":449527792730374145,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527792730374145,"checkpoint_ts":449527792730374145,"checkpoint_time":"2024-05-04 16:47:21.751"} [Sat May 4 16:47:25 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 16:47:25 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> [Sat May 4 16:47:25 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>> table multi_tables_ddl_test.t55 exists table multi_tables_ddl_test.t66 exists table multi_tables_ddl_test.t7 exists table multi_tables_ddl_test.t88 exists table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 20.95 secs (177876229 bytes/sec) [Pipeline] { [Pipeline] cache table multi_tables_ddl_test.finish_mark exists check table exists success + endpoints=http://127.0.0.1:2379 + changefeed_id=test-normal + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s + info='{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449527797199929368, "checkpoint_time": "2024-05-04 16:47:38.801", "error": null }' + echo '{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449527797199929368, "checkpoint_time": "2024-05-04 16:47:38.801", "error": null }' { "upstream_id": 
7365063292841021723, "namespace": "default", "id": "test-normal", "state": "normal", "checkpoint_tso": 449527797199929368, "checkpoint_time": "2024-05-04 16:47:38.801", "error": null } ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449527797199929368, '"checkpoint_time":' '"2024-05-04' '16:47:38.801",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449527797199929368, '"checkpoint_time":' '"2024-05-04' '16:47:38.801",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-1 + expected_state=normal + error_msg=null + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s + info='{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449527797894873098, "checkpoint_time": "2024-05-04 16:47:41.452", "error": null }' + echo '{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449527797894873098, "checkpoint_time": "2024-05-04 16:47:41.452", "error": null }' { "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-1", "state": "normal", "checkpoint_tso": 449527797894873098, "checkpoint_time": "2024-05-04 16:47:41.452", "error": null } ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449527797894873098, '"checkpoint_time":' '"2024-05-04' '16:47:41.452",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449527797894873098, '"checkpoint_time":' '"2024-05-04' '16:47:41.452",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] + endpoints=http://127.0.0.1:2379 + changefeed_id=test-error-2 + expected_state=failed + error_msg=ErrSyncRenameTableFailed + tls_dir= + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s + info='{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449527796846034946, "checkpoint_time": "2024-05-04 16:47:37.451", "error": { "time": "2024-05-04T16:47:38.503625636+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." 
} }' + echo '{ "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449527796846034946, "checkpoint_time": "2024-05-04 16:47:37.451", "error": { "time": "2024-05-04T16:47:38.503625636+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } }' { "upstream_id": 7365063292841021723, "namespace": "default", "id": "test-error-2", "state": "failed", "checkpoint_tso": 449527796846034946, "checkpoint_time": "2024-05-04 16:47:37.451", "error": { "time": "2024-05-04T16:47:38.503625636+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSyncRenameTableFailed", "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule." } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449527796846034946, '"checkpoint_time":' '"2024-05-04' '16:47:37.451",' '"error":' '{' '"time":' '"2024-05-04T16:47:38.503625636+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365063292841021723, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449527796846034946, '"checkpoint_time":' '"2024-05-04' '16:47:37.451",' '"error":' '{' '"time":' '"2024-05-04T16:47:38.503625636+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}' + message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.' + [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]] check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:47:44 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! 
>>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 8.05 secs (462800816 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... 
Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] } [Pipeline] } [Pipeline] checkout [Pipeline] checkout The recommended git tool is: git [Pipeline] checkout The recommended git tool is: git [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { The recommended git tool is: git [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
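Collected into one place, the readiness probe traced repeatedly above amounts to the following few commands (a sketch of the traced shell steps, not the original pipeline script):

# Wait for ZooKeeper and Kafka to accept connections, then confirm that
# broker id 1 has registered itself under /brokers/ids in ZooKeeper.
echo "Waiting for zookeeper to be ready..."
nc -z localhost 2181
echo "Waiting for kafka to be ready..."
nc -z localhost 9092
echo "Waiting for kafka-broker to be ready..."
echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1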
[Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] { No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3c17124e; decorates RemoteLauncher[hudson.remoting.Channel@63590b5a:JNLP4-connect connection from 10.233.72.221/10.233.72.221:33400] will be ignored (a typical symptom is the Git executable not being run inside a designated container) No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@1d84efee; decorates RemoteLauncher[hudson.remoting.Channel@10d53ca4:JNLP4-connect connection from 10.233.73.78/10.233.73.78:36732] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] { [Pipeline] { Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] // container Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 [Pipeline] // container No credentials specified Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@3345258f; decorates RemoteLauncher[hudson.remoting.Channel@3f33c2ff:JNLP4-connect connection from 10.233.69.76/10.233.69.76:35276] will be ignored (a typical symptom is the Git executable not being run inside a designated container) Cloning the remote Git repository Using shallow clone with depth 1 [Pipeline] // container Cloning repository https://github.com/PingCAP-QE/ci.git > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10 [Pipeline] // container [Pipeline] // container 
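The cdc server startup traced earlier (the repeated curl calls against http://127.0.0.1:8300/debug/info) follows a simple poll-until-ready pattern. Condensed into a sketch, with the retry limit, sleep interval, and grep patterns taken from that trace:

# Poll the cdc server's /debug/info endpoint until it reports "etcd info",
# failing fast on "failed to get info:" or after 50 attempts (3 s apart).
for i in $(seq 0 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1 || true)
  echo "$res" | grep -q 'failed to get info:' && { echo "cdc server reported an error"; exit 1; }
  echo "$res" | grep -q 'etcd info' && break
  [ "$i" -eq 50 ] && { echo "cdc server did not become ready"; exit 1; }
  sleep 3
done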
[Pipeline] // container [Pipeline] sh [Pipeline] sh [Pipeline] sh Fetching upstream changes from https://github.com/PingCAP-QE/ci.git > git --version # timeout=10 > git --version # 'git version 2.39.2' > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G07 Run cases: kv_client_stream_reconnect cdc split_region PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=81bfa6be-e149-4b3c-9875-901a69f21bc0 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** 
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G07 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-kjzln GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r pingcap_tiflow_pull_cdc_integration_kafka_test_1836-kjzln GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
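The run_group.sh kafka G07 invocation above drives its listed cases one after another. A much-simplified sketch of that loop, inferred from the "Run cases:" line and the per-case banners in this log (the real script under tiflow/tests/integration_tests defines the group lists and does more than this; passing the sink type as run.sh's argument is an assumption):

# Simplified sketch of a group runner: iterate over the cases named in the
# "Run cases:" line and invoke each case's run.sh with the sink type.
sink_type="kafka"
group_cases="kv_client_stream_reconnect cdc split_region"   # group G07, per the log above
for tc in $group_cases; do
  echo "=================>> Running test tests/integration_tests/$tc/run.sh using Sink-Type: $sink_type... <<================="
  bash "tests/integration_tests/$tc/run.sh" "$sink_type"
done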
[Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G00 Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=6890f7a7-ba04-4cad-bb90-56c1e048d830 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** 
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G00 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-r5bkm GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-r5bkm pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:47:59 CST 2024] <<<<<< run test case bdr_mode success! 
>>>>>> + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G04 Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=9a8ccc94-f5d9-42d7-8b3e-851b24eade2b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G04 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3bwq1 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3bwq1 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G05 Run cases: charset_gbk ddl_manager multi_source PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=f2e4f23e-1cf3-4b72-9b32-c6209b1baf80 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G05 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-sj989 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm pingcap_tiflow_pull_cdc_integration_kafka_test_1836-sj989 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
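The changefeed health checks traced earlier in this log (the cdc cli changefeed query ... -s calls piped through jq in the multi_tables_ddl_v2 case) reduce to a small verification helper. A condensed sketch, with the changefeed ID and expectations shown here as illustrative parameters taken from that trace:

# Query a changefeed's simplified status and assert its state and error message.
pd="http://127.0.0.1:2379"
changefeed_id="test-error-2"            # illustrative; from the earlier trace
expected_state="failed"
expected_error="ErrSyncRenameTableFailed"

info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
state=$(echo "$info" | jq -r .state)
message=$(echo "$info" | jq -r .error.message)

[ "$state" = "$expected_state" ] || { echo "unexpected state: $state"; exit 1; }
echo "$message" | grep -q "$expected_error" || { echo "unexpected error: $message"; exit 1; }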
[Pipeline] sh > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] // container Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) [Pipeline] // container [Pipeline] sh Commit message: "fix(br): use failpoint tidb-server instead (#2951)" + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G03 Run cases: row_format drop_many_tables processor_stop_delay partition_table PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=07feabdc-df4c-433b-a73d-043da1639097 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G03 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fzbr4 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-fzbr4 pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
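Each parallel pod runs exactly one case group: the trace above invokes ./tests/integration_tests/run_group.sh kafka G03 and then executes every case listed after "Run cases:". A minimal, illustrative sketch of such a group runner is shown below; the group-to-case mapping and file layout are assumptions for illustration, not the contents of the real run_group.sh.

  #!/usr/bin/env bash
  # Illustrative sketch of a case-group runner (not the actual run_group.sh).
  # Usage: ./run_group.sh kafka G03
  set -e
  sink_type=$1   # e.g. "kafka"
  group=$2       # e.g. "G03"
  # Hypothetical group -> case mapping; the real mapping lives in run_group.sh.
  declare -A groups=(
    [G03]="row_format drop_many_tables processor_stop_delay partition_table"
    [G06]="sink_retry changefeed_error ddl_sequence resourcecontrol"
  )
  cases=${groups[$group]:?unknown group $group}
  for case_name in $cases; do
    echo "=================>> Running test $case_name using Sink-Type: $sink_type... <<================="
    bash "tests/integration_tests/$case_name/run.sh" "$sink_type"
  done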
Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G06 Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=5af09413-3678-4bee-beab-5ba460f5be20 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G06 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tvq5x GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tvq5x pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Commit message: "fix(br): use failpoint tidb-server instead (#2951)" + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G01 Run cases: open_protocol_handle_key_only PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=d3692e10-c520-4c9e-adf8-96057e4da836 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G01 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3hd5h GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj pingcap_tiflow_pull_cdc_integration_kafka_test_1836-3hd5h GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
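The JOB_SPEC variable printed in the dumps above is the Prow job spec as JSON: it identifies the presubmit job, the base branch SHA, and the PR under test (pingcap/tiflow#10919 at commit c950cce3a9b105fd95bb2c788e1ab69ec32e0668). Individual fields can be pulled out of it with jq, for example:

  # Extract the PR number, head SHA, and base SHA under test from the Prow job spec.
  echo "$JOB_SPEC" | jq -r '.refs.pulls[0].number'
  echo "$JOB_SPEC" | jq -r '.refs.pulls[0].sha'
  echo "$JOB_SPEC" | jq -r '.refs.base_sha'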
[Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Avoid second fetch Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main) > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { Commit message: "fix(br): use failpoint tidb-server instead (#2951)" [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10 > git rev-parse origin/main^{commit} # timeout=10 > git config core.sparsecheckout # timeout=10 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10 [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] cache + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G08 Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=3c9481bd-eb69-4d8c-9e23-2eedcdc37f66 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test 
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G08 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-ppnsc GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp pingcap_tiflow_pull_cdc_integration_kafka_test_1836-ppnsc GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect Starting Upstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:48:02 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/foreign_key Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/row_format Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release start tidb cluster in /tmp/tidb_cdc_test/sink_retry Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<================= kafka downstream isn't support syncpoint record [Sat May 4 16:48:05 CST 2024] <<<<<< run test case syncpoint success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/open_protocol_handle_key_only Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/charset_gbk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... 
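The repeated "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines are expected noise: while "Verifying Upstream TiDB is started..." the harness keeps probing TiDB's MySQL port until the server accepts connections. An equivalent probe loop looks roughly like this (port 4000 for the upstream TiDB is an assumption based on the tests' usual defaults):

  # Retry a trivial query until TiDB answers; ERROR 2003 just means "not up yet".
  host=127.0.0.1
  port=4000   # assumed upstream TiDB port
  for i in $(seq 1 60); do
    if mysql -h "$host" -P "$port" -u root -e "SELECT 1" >/dev/null 2>&1; then
      echo "TiDB is ready"
      break
    fi
    sleep 1
  done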
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2855b00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:9312, start at 2024-05-04 16:48:11.15323434 +0800 CST m=+5.100973463 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:11.159 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:11.165 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:11.165 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2855b00013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:9312, start at 2024-05-04 16:48:11.15323434 +0800 CST m=+5.100973463 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:11.159 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:11.165 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:11.165 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b285758000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fgs7x-pq0q7, pid:9398, start at 2024-05-04 16:48:11.235930808 +0800 CST m=+5.133879614 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:11.242 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:11.222 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:11.222 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:48:11 CST 2024] <<<<<< run test case hang_sink_suicide success! >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0b28650c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:1352, start at 2024-05-04 16:48:12.145542798 +0800 CST m=+5.153190433 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.152 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.149 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.149 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28650c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:1352, start at 2024-05-04 16:48:12.145542798 +0800 CST m=+5.153190433 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.152 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.149 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.149 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2865000010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:1442, start at 2024-05-04 16:48:12.113115821 +0800 CST m=+5.064643140 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.122 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.096 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.096 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b286b140013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:1347, start at 2024-05-04 16:48:12.509712569 +0800 CST m=+5.082987867 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.515 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.485 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.485 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b286b140013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:1347, start at 2024-05-04 16:48:12.509712569 +0800 CST m=+5.082987867 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.515 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.485 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.485 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b286d500014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:1422, start at 2024-05-04 16:48:12.659517908 +0800 CST m=+5.182437440 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:12.666 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:12.628 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:12.628 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
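The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps printed once each TiDB answers are the contents of its mysql.tidb bootstrap table, which records the bootstrap flag, server version, and the tikv_gc_* settings (GC leader, run interval, life time, safe point) shown above. The same information can be pulled manually with a query along these lines; host and port here are assumptions:

  # Inspect bootstrap and GC metadata on the upstream TiDB.
  mysql -h 127.0.0.1 -P 4000 -u root -e \
    "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"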
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10779.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2883f0000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:1287, start at 2024-05-04 16:48:14.089879369 +0800 CST m=+5.276547319 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:14.096 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-16:48:14.076 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:14.076 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:48:15 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28322834.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + tso='449527806601723905 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527806601723905 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:48:16 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1082010822.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2821.out cli tso query --pd=http://127.0.0.1:2379 find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:48:16 CST 2024] <<<<<< run test case server_config_compatibility success! 
>>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2883f0000e Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:1287, start at 2024-05-04 16:48:14.089879369 +0800 CST m=+5.276547319 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:14.096 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:14.076 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:14.076 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2883cc0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:1365, start at 2024-05-04 16:48:14.086080237 +0800 CST m=+5.219979411 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:14.093 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:14.067 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:14.067 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
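The curl and grep traces above are the harness waiting for a freshly started cdc server to expose /debug/info. Condensed from those traces into a standalone sketch (the variable names, the 50-attempt budget and the 3-second sleep all appear in the trace; the failure message is illustrative):

    # Wait for the cdc server: /debug/info must return text containing
    # 'etcd info' and must not contain 'failed to get info:'.
    curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
    for ((i = 0; i <= 50; i++)); do
        res=$($curl_status_cmd 2>&1) || true
        if echo "$res" | grep -q 'failed to get info:'; then
            res=''          # endpoint answered, but the server is not ready yet
        fi
        if echo "$res" | grep -q 'etcd info'; then
            break           # the capture registered itself in etcd; server is up
        fi
        if [ "$i" -eq 50 ]; then
            echo 'cdc server failed to start within the retry budget'
            exit 1
        fi
        sleep 3
    done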
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2894cc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:1350, start at 2024-05-04 16:48:15.201530073 +0800 CST m=+6.103503639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:15.209 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:15.206 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:15.206 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2894cc0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:1350, start at 2024-05-04 16:48:15.201530073 +0800 CST m=+6.103503639 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:15.209 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:15.206 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:15.206 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2895800015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:1434, start at 2024-05-04 16:48:15.240205288 +0800 CST m=+6.087825193 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:15.246 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:15.249 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:15.249 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
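The GO_FAILPOINTS assignment in the kv_client_stream_reconnect startup above is how these cases inject faults into the cdc binary before starting it. A condensed sketch of that invocation, with the coverage file name shortened for readability and everything else mirroring the logged command:

    # Enable a failpoint for this process only, then start the cdc server under
    # the coverage-instrumented test binary.
    export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)'
    cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.out \
        server \
        --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log \
        --log-level debug \
        --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data \
        --cluster-id default \
        --addr 127.0.0.1:8300 \
        --pd http://127.0.0.1:2379 &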
Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + set +x + tso='449527806973444097 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527806973444097 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:48:17 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28562858.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:18 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eb671781-5225-400a-b41f-60b2d1b45a5b {"id":"eb671781-5225-400a-b41f-60b2d1b45a5b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812495} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f35ac4 eb671781-5225-400a-b41f-60b2d1b45a5b /tidb/cdc/default/default/upstream/7365063534126194260 {"id":7365063534126194260,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eb671781-5225-400a-b41f-60b2d1b45a5b {"id":"eb671781-5225-400a-b41f-60b2d1b45a5b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812495} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f35ac4 eb671781-5225-400a-b41f-60b2d1b45a5b /tidb/cdc/default/default/upstream/7365063534126194260 {"id":7365063534126194260,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/eb671781-5225-400a-b41f-60b2d1b45a5b {"id":"eb671781-5225-400a-b41f-60b2d1b45a5b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812495} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f35ac4 eb671781-5225-400a-b41f-60b2d1b45a5b /tidb/cdc/default/default/upstream/7365063534126194260 {"id":7365063534126194260,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:48:18 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>> + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2799.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c2f4000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:1292, start at 2024-05-04 16:48:18.118573901 +0800 CST m=+5.166377041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.125 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.109 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.109 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2837.out cli tso query --pd=http://127.0.0.1:2379 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a {"id":"a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812496} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9e941cf a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a /tidb/cdc/default/default/upstream/7365063521083303880 {"id":7365063521083303880,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a {"id":"a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812496} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9e941cf a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a /tidb/cdc/default/default/upstream/7365063521083303880 
{"id":7365063521083303880,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a {"id":"a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812496} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9e941cf a2c359fe-fb0f-4e71-bc67-cb3c5c02be2a /tidb/cdc/default/default/upstream/7365063521083303880 {"id":7365063521083303880,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10874.out cli changefeed create --start-ts=449527806601723905 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml Create changefeed successfully! ID: 9d4ebac0-8fc7-402f-a549-fae648e1b2cd Info: {"upstream_id":7365063521083303880,"namespace":"default","id":"9d4ebac0-8fc7-402f-a549-fae648e1b2cd","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-04T16:48:19.691691197+08:00","start_ts":449527806601723905,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527806601723905,"checkpoint_ts":449527806601723905,"checkpoint
_time":"2024-05-04 16:48:14.666"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c2f4000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj, pid:1294, start at 2024-05-04 16:48:18.118968525 +0800 CST m=+5.018605468 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.126 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.109 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.109 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c2f4000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj, pid:1294, start at 2024-05-04 16:48:18.118968525 +0800 CST m=+5.018605468 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.126 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.109 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.109 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c6c40016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3hd5h-7tzbj, pid:1383, start at 2024-05-04 16:48:18.384635669 +0800 CST m=+5.227519679 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.394 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.353 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.353 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/open_protocol_handle_key_only/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_dup_error_restart/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:48:19 CST 2024] <<<<<< run test case changefeed_dup_error_restart success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c1000013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:1408, start at 2024-05-04 16:48:18.019543274 +0800 CST m=+5.791209542 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.025 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:17.984 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:17.984 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c1000013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:1408, start at 2024-05-04 16:48:18.019543274 +0800 CST m=+5.791209542 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.025 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:17.984 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:17.984 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c28c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:1481, start at 2024-05-04 16:48:18.112824618 +0800 CST m=+5.830289276 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.119 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.083 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.083 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
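Each case above also grabs a start-ts before creating its changefeed: the `cli tso query` call prints the TSO followed by a "PASS coverage: ..." line from the instrumented binary, and the trace keeps only the first whitespace-separated field with awk. As a standalone sketch (the coverage file name is a placeholder):

    # Query the current TSO from PD; keep only the first field of the first line.
    start_ts=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out \
        cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk -F ' ' '{print $1}')
    echo "start-ts for the changefeed: ${start_ts}"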
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:20 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f41fd815-6a8c-42be-a333-2cfc1e4243aa {"id":"f41fd815-6a8c-42be-a333-2cfc1e4243aa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812497} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f50cce f41fd815-6a8c-42be-a333-2cfc1e4243aa /tidb/cdc/default/default/upstream/7365063538533169830 {"id":7365063538533169830,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f41fd815-6a8c-42be-a333-2cfc1e4243aa {"id":"f41fd815-6a8c-42be-a333-2cfc1e4243aa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812497} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f50cce f41fd815-6a8c-42be-a333-2cfc1e4243aa /tidb/cdc/default/default/upstream/7365063538533169830 {"id":7365063538533169830,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f41fd815-6a8c-42be-a333-2cfc1e4243aa {"id":"f41fd815-6a8c-42be-a333-2cfc1e4243aa","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812497} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f50cce f41fd815-6a8c-42be-a333-2cfc1e4243aa /tidb/cdc/default/default/upstream/7365063538533169830 {"id":7365063538533169830,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2915.out cli changefeed create --start-ts=449527806973444097 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-4197?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + set +x + tso='449527807849529345 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527807849529345 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 16:48:20 CST 2024] <<<<<< START cdc server in row_format case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28332835.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28c2f4000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:1292, start at 2024-05-04 16:48:18.118573901 +0800 CST m=+5.166377041 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:18.125 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:18.109 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:18.109 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b28ef64000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:1371, start at 2024-05-04 16:48:20.963166348 +0800 CST m=+7.953427947 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:20.969 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:20.953 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:20.953 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Create changefeed successfully! ID: cd38a649-9049-484b-9ae3-e11b5848367d Info: {"upstream_id":7365063538533169830,"namespace":"default","id":"cd38a649-9049-484b-9ae3-e11b5848367d","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-4197?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:48:21.119703158+08:00","start_ts":449527806973444097,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527806973444097,"checkpoint_ts":449527806973444097,"checkpoint_time":"2024-05-04 16:48:16.084"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x + tso='449527807935774721 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527807935774721 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
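The changefeed commands above all follow the same shape: the start-ts obtained from the TSO query plus a Kafka sink URI whose query parameters pick the protocol, partition count and broker limits. A condensed sketch, with the topic suffix and coverage file as placeholders and the URI parameters mirroring the foreign_key case above:

    # Create a Kafka changefeed against the local broker started in this pod.
    cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out cli changefeed create \
        --start-ts="${start_ts}" \
        --sink-uri='kafka://127.0.0.1:9092/ticdc-example-test?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'

The multi_topics_v2 case earlier in the log differs only in using protocol=canal-json and passing --config with a sink dispatcher rule (matcher ["test.*"], topic "{schema}_{table}"), which is why its changefeed JSON shows per-table topic dispatch.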
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ***************** properties ***************** "mysql.db"="sink_retry" "mysql.user"="root" "workload"="core" "dotransactions"="false" "scanproportion"="0" "mysql.port"="4000" "mysql.host"="127.0.0.1" "insertproportion"="0" "operationcount"="0" "updateproportion"="0" "threadcount"="2" "readproportion"="0" "readallfields"="true" "requestdistribution"="uniform" "recordcount"="10" ********************************************** Run finished, takes 7.702946ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2318.7, Avg(us): 1468, Min(us): 824, Max(us): 3292, 95th(us): 4000, 99th(us): 4000 [Sat May 4 16:48:21 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.28942896.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2837.out cli tso query --pd=http://127.0.0.1:2379 + set +x [Sat May 4 16:48:22 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... 
<<================= The 1 times to try to start tidb cluster... + set +x + tso='449527808461111297 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527808461111297 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:48:23 CST 2024] <<<<<< START cdc server in open_protocol_handle_key_only case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + GO_FAILPOINTS= + etcd_info_msg='etcd info' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.28742876.out server --log-file /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/open_protocol_handle_key_only/cdc_data --cluster-id default + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 16:48:23 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.28602862.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e189627-af1f-480e-9e26-77574519d44d {"id":"6e189627-af1f-480e-9e26-77574519d44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f88ad3 6e189627-af1f-480e-9e26-77574519d44d /tidb/cdc/default/default/upstream/7365063544570820031 {"id":7365063544570820031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e189627-af1f-480e-9e26-77574519d44d {"id":"6e189627-af1f-480e-9e26-77574519d44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f88ad3 6e189627-af1f-480e-9e26-77574519d44d /tidb/cdc/default/default/upstream/7365063544570820031 {"id":7365063544570820031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6e189627-af1f-480e-9e26-77574519d44d {"id":"6e189627-af1f-480e-9e26-77574519d44d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f88ad3 6e189627-af1f-480e-9e26-77574519d44d /tidb/cdc/default/default/upstream/7365063544570820031 {"id":7365063544570820031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2893.out cli changefeed create --start-ts=449527807849529345 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-30007?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 493a53a9-a8e9-4354-99ee-93bd2bc6e952 Info: {"upstream_id":7365063544570820031,"namespace":"default","id":"493a53a9-a8e9-4354-99ee-93bd2bc6e952","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-30007?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:48:24.473475167+08:00","start_ts":449527807849529345,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527807849529345,"checkpoint_ts":449527807849529345,"checkpoint_time":"2024-05-04 16:48:19.426"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 {"id":"ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f9a1ee ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 /tidb/cdc/default/default/upstream/7365063546198092789 {"id":7365063546198092789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 {"id":"ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f9a1ee ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 /tidb/cdc/default/default/upstream/7365063546198092789 {"id":7365063546198092789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 {"id":"ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812501} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42c9f9a1ee ec5dce4d-5c80-4bd2-a8f6-2c5c31d1b728 /tidb/cdc/default/default/upstream/7365063546198092789 {"id":7365063546198092789,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2954.out cli changefeed create --start-ts=449527807935774721 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-26143?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 70a66bbe-c903-48de-977b-f6bab2b21f45 Info: {"upstream_id":7365063546198092789,"namespace":"default","id":"70a66bbe-c903-48de-977b-f6bab2b21f45","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-26143?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:48:24.923619388+08:00","start_ts":449527807935774721,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527807935774721,"checkpoint_ts":449527807935774721,"checkpoint_time":"2024-05-04 16:48:19.755"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 16:48:25 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > + set +x [Sat May 4 16:48:26 CST 2024] <<<<<< START kafka consumer in sink_retry case >>>>>> start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
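Both changefeeds above are created the same way: "cdc cli changefeed create" with a Kafka sink URI whose query string carries the encoder protocol, partition count, broker version, and message-size limit. A hedged sketch of the pattern (the topic name and start-ts are placeholders copied from one invocation in the trace; the flags are the ones shown above):

# Sketch: create a Kafka changefeed (TOPIC/START_TS are placeholders; flags mirror the trace above)
TOPIC="ticdc-row-format-test-30007"
START_TS=449527807849529345
SINK_URI="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"

cdc cli changefeed create \
    --start-ts="$START_TS" \
    --sink-uri="$SINK_URI"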
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 22.58 secs (165070216 bytes/sec) [Pipeline] { < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:26 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/586fdd83-5d36-43f0-805e-df4565297eb2 {"id":"586fdd83-5d36-43f0-805e-df4565297eb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca08aeca 586fdd83-5d36-43f0-805e-df4565297eb2 /tidb/cdc/default/default/upstream/7365063561068243464 {"id":7365063561068243464,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/586fdd83-5d36-43f0-805e-df4565297eb2 {"id":"586fdd83-5d36-43f0-805e-df4565297eb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca08aeca 586fdd83-5d36-43f0-805e-df4565297eb2 /tidb/cdc/default/default/upstream/7365063561068243464 {"id":7365063561068243464,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/586fdd83-5d36-43f0-805e-df4565297eb2 {"id":"586fdd83-5d36-43f0-805e-df4565297eb2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca08aeca 586fdd83-5d36-43f0-805e-df4565297eb2 /tidb/cdc/default/default/upstream/7365063561068243464 {"id":7365063561068243464,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: faa63ed1-56b2-4a50-902e-fee9e6445f2c Info: {"upstream_id":7365063561068243464,"namespace":"default","id":"faa63ed1-56b2-4a50-902e-fee9e6445f2c","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-04T16:48:26.648533131+08:00","start_ts":449527808880541697,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527808880541697,"checkpoint_ts":449527808880541697,"checkpoint_time":"2024-05-04 16:48:23.359"} [Sat May 4 16:48:26 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>> + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:26 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/697380f9-b8c0-4317-a345-aafa7e4227cf {"id":"697380f9-b8c0-4317-a345-aafa7e4227cf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca0750ce 697380f9-b8c0-4317-a345-aafa7e4227cf /tidb/cdc/default/default/upstream/7365063564421771286 {"id":7365063564421771286,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/697380f9-b8c0-4317-a345-aafa7e4227cf {"id":"697380f9-b8c0-4317-a345-aafa7e4227cf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca0750ce 697380f9-b8c0-4317-a345-aafa7e4227cf /tidb/cdc/default/default/upstream/7365063564421771286 {"id":7365063564421771286,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/697380f9-b8c0-4317-a345-aafa7e4227cf {"id":"697380f9-b8c0-4317-a345-aafa7e4227cf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812503} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca0750ce 697380f9-b8c0-4317-a345-aafa7e4227cf /tidb/cdc/default/default/upstream/7365063564421771286 {"id":7365063564421771286,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.open_protocol_handle_key_only.cli.2930.out cli changefeed create --start-ts=449527808461111297 '--sink-uri=kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol&max-message-bytes=800&kafka-version=2.4.1' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/conf/changefeed.toml Create changefeed successfully! 
ID: 9ca8e703-864c-4bdb-a912-38f4e10fd5aa Info: {"upstream_id":7365063564421771286,"namespace":"default","id":"9ca8e703-864c-4bdb-a912-38f4e10fd5aa","sink_uri":"kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol\u0026max-message-bytes=800\u0026kafka-version=2.4.1","create_time":"2024-05-04T16:48:26.784611671+08:00","start_ts":449527808461111297,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527808461111297,"checkpoint_ts":449527808461111297,"checkpoint_time":"2024-05-04 16:48:21.759"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... [Pipeline] cache Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x table test.finish_mark not exists for 1-th check, retry later Starting Upstream TiDB... 
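Unlike the previous two, the open_protocol_handle_key_only changefeed also passes a --config file, and the resulting kafka_config.large_message_handle block shows it enables handle-key-only with lz4 compression for messages that exceed max-message-bytes=800. A sketch of what such a changefeed.toml could look like (the TOML section and key names are an assumption inferred from the JSON field names above, not copied from the repository):

# Sketch: enable handle-key-only for oversized Kafka messages via --config
# (TOML section/key names below are assumptions inferred from the JSON dump above)
cat > /tmp/changefeed.toml <<'EOF'
[sink.kafka-config.large-message-handle]
large-message-handle-option = "handle-key-only"
large-message-handle-compression = "lz4"
EOF

cdc cli changefeed create \
    --start-ts=449527808461111297 \
    --sink-uri="kafka://127.0.0.1:9092/open-protocol-handle-key-only?protocol=open-protocol&max-message-bytes=800&kafka-version=2.4.1" \
    --config=/tmp/changefeed.toml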
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:48:29 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29632965.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.finish_mark not exists for 2-th check, retry later table foreign_key.finish_mark not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark exists check diff successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
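Note how the processor_err_chan start differs from the earlier server launches: it exports GO_FAILPOINTS so the failpoint framework fires a one-shot ProcessorAddTableError inside the processor, which the test then expects to surface through the error channel. A condensed sketch of that start sequence (flags and the failpoint spec are the ones in the trace; TESTS_DIR is a placeholder for the long workspace path):

# Sketch: start a cdc server with a one-shot failpoint injected (TESTS_DIR is a placeholder)
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)'
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.out server \
    --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log \
    --log-level debug \
    --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data \
    --cluster-id default \
    --config "$TESTS_DIR/processor_err_chan/conf/server.toml" \
    --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &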
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ead3cff5-e707-4709-9620-6c3cbd54a480 {"id":"ead3cff5-e707-4709-9620-6c3cbd54a480","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812509} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca01f381 ead3cff5-e707-4709-9620-6c3cbd54a480 /tidb/cdc/default/default/upstream/7365063560399879317 {"id":7365063560399879317,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ead3cff5-e707-4709-9620-6c3cbd54a480 {"id":"ead3cff5-e707-4709-9620-6c3cbd54a480","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812509} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca01f381 ead3cff5-e707-4709-9620-6c3cbd54a480 /tidb/cdc/default/default/upstream/7365063560399879317 {"id":7365063560399879317,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ead3cff5-e707-4709-9620-6c3cbd54a480 {"id":"ead3cff5-e707-4709-9620-6c3cbd54a480","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812509} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca01f381 ead3cff5-e707-4709-9620-6c3cbd54a480 /tidb/cdc/default/default/upstream/7365063560399879317 {"id":7365063560399879317,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:48:32 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>> check_changefeed_state http://127.0.0.1:2379 573bb475-5f44-4128-903a-c795543987f2 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=573bb475-5f44-4128-903a-c795543987f2 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 573bb475-5f44-4128-903a-c795543987f2 -s + info='{ "upstream_id": 7365063560399879317, "namespace": "default", "id": "573bb475-5f44-4128-903a-c795543987f2", "state": "normal", "checkpoint_tso": 
449527811298820101, "checkpoint_time": "2024-05-04 16:48:32.584", "error": null }' + echo '{ "upstream_id": 7365063560399879317, "namespace": "default", "id": "573bb475-5f44-4128-903a-c795543987f2", "state": "normal", "checkpoint_tso": 449527811298820101, "checkpoint_time": "2024-05-04 16:48:32.584", "error": null }' { "upstream_id": 7365063560399879317, "namespace": "default", "id": "573bb475-5f44-4128-903a-c795543987f2", "state": "normal", "checkpoint_tso": 449527811298820101, "checkpoint_time": "2024-05-04 16:48:32.584", "error": null } ++ echo '{' '"upstream_id":' 7365063560399879317, '"namespace":' '"default",' '"id":' '"573bb475-5f44-4128-903a-c795543987f2",' '"state":' '"normal",' '"checkpoint_tso":' 449527811298820101, '"checkpoint_time":' '"2024-05-04' '16:48:32.584",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365063560399879317, '"namespace":' '"default",' '"id":' '"573bb475-5f44-4128-903a-c795543987f2",' '"state":' '"normal",' '"checkpoint_tso":' 449527811298820101, '"checkpoint_time":' '"2024-05-04' '16:48:32.584",' '"error":' null '}' ++ jq -r .error.message table foreign_key.finish_mark not exists for 2-th check, retry later + message=null + [[ ! null =~ null ]] run task successfully check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table charset_gbk_test0.t0 exists table charset_gbk_test0.t1 exists table charset_gbk_test1.t0 not exists for 1-th check, retry later wait process cdc.test exit for 1-th time... table foreign_key.finish_mark not exists for 3-th check, retry later cdc.test: no process found wait process cdc.test exit for 2-th time... process cdc.test already exit [Sat May 4 16:48:35 CST 2024] <<<<<< run test case open_protocol_handle_key_only success! >>>>>> table row_format.finish_mark not exists for 1-th check, retry later check diff failed 2-th time, retry later table charset_gbk_test1.t0 exists table test.finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b29cb580013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:1861, start at 2024-05-04 16:48:35.048810173 +0800 CST m=+5.020526420 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:35.055 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:35.030 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:35.030 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
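The check_changefeed_state helper traced above queries the changefeed with "cdc cli changefeed query -s" and asserts on the state and error.message fields with jq. A minimal reconstruction from the trace (function name and argument order match the trace; TLS handling and the echo of the raw JSON are omitted):

# Reconstructed from the trace above (simplified; TLS branch omitted)
check_changefeed_state() {
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state" && exit 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
        echo "error message $message does not match $error_msg" && exit 1
    fi
    echo "run task successfully"
}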
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b29cb580013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:1861, start at 2024-05-04 16:48:35.048810173 +0800 CST m=+5.020526420 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:35.055 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:35.030 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:35.030 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b29ccdc0018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:1945, start at 2024-05-04 16:48:35.1765558 +0800 CST m=+5.089831187 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:50:35.182 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:48:35.176 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:38:35.176 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
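The VARIABLE_NAME / VARIABLE_VALUE dump above is the harness confirming that upstream and downstream TiDB have bootstrapped and that the GC worker is running, by reading the mysql.tidb system table. A hedged sketch of an equivalent check (host and port as used in this suite; the exact query the harness runs is an assumption):

# Sketch: confirm TiDB is up and bootstrapped by reading mysql.tidb (query shape is an assumption)
mysql -h 127.0.0.1 -P 4000 -u root -e \
    "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;"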
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table foreign_key.finish_mark not exists for 4-th check, retry later table row_format.finish_mark not exists for 2-th check, retry later check diff successfully wait process cdc.test exit for 1-th time... table test.finish_mark not exists for 2-th check, retry later [Sat May 4 16:48:38 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.33793381.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:39 CST 2024] <<<<<< run test case processor_err_chan success! >>>>>> table foreign_key.finish_mark not exists for 5-th check, retry later table row_format.finish_mark not exists for 3-th check, retry later table test.finish_mark not exists for 3-th check, retry later table foreign_key.finish_mark not exists for 6-th check, retry later table row_format.finish_mark not exists for 4-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
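Lines like "table foreign_key.finish_mark not exists for 4-th check, retry later" come from a polling helper that repeatedly asks the downstream TiDB whether a marker table has been replicated before running the diff. A minimal sketch of such a check (the helper name, retry budget, and sleep interval are assumptions; the log message format mirrors the output above):

# check_table_exists: poll downstream TiDB until a table shows up (sketch; name and limits are assumptions)
check_table_exists() {
    local table=$1 host=$2 port=$3
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -e "DESC ${table};" >/dev/null 2>&1; then
            echo "table ${table} exists" && return 0
        fi
        echo "table ${table} not exists for ${i}-th check, retry later"
        sleep 2
    done
    echo "table ${table} not exists at last check" && return 1
}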
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:48:41 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ff164ed3-ad2b-4ab6-954a-bedbce8533d2 {"id":"ff164ed3-ad2b-4ab6-954a-bedbce8533d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812518} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca4f46c7 ff164ed3-ad2b-4ab6-954a-bedbce8533d2 /tidb/cdc/default/default/upstream/7365063632246951470 {"id":7365063632246951470,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ff164ed3-ad2b-4ab6-954a-bedbce8533d2 {"id":"ff164ed3-ad2b-4ab6-954a-bedbce8533d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812518} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca4f46c7 ff164ed3-ad2b-4ab6-954a-bedbce8533d2 /tidb/cdc/default/default/upstream/7365063632246951470 {"id":7365063632246951470,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ff164ed3-ad2b-4ab6-954a-bedbce8533d2 {"id":"ff164ed3-ad2b-4ab6-954a-bedbce8533d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812518} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ca4f46c7 ff164ed3-ad2b-4ab6-954a-bedbce8533d2 /tidb/cdc/default/default/upstream/7365063632246951470 {"id":7365063632246951470,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 7c3ed402-f667-48a0-8b48-b2c8c6414484 Info: {"upstream_id":7365063632246951470,"namespace":"default","id":"7c3ed402-f667-48a0-8b48-b2c8c6414484","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-04T16:48:41.806162679+08:00","start_ts":449527812844421121,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527812844421121,"checkpoint_ts":449527812844421121,"checkpoint_time":"2024-05-04 16:48:38.480"} [Sat May 4 16:48:41 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>> Starting generate kafka big messages... go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/atomic v1.11.0 table test.finish_mark not exists for 4-th check, retry later table foreign_key.finish_mark not exists for 7-th check, retry later table row_format.finish_mark not exists for 5-th check, retry later table test.finish_mark not exists for 5-th check, retry later table foreign_key.finish_mark not exists for 8-th check, retry later table row_format.finish_mark not exists for 6-th check, retry later table test.finish_mark exists check table exists success check diff successfully table foreign_key.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... table kafka_big_messages.test not exists for 1-th check, retry later table row_format.finish_mark not exists for 7-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 18.77 secs (198593381 bytes/sec) [Pipeline] { [Pipeline] cache wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:48 CST 2024] <<<<<< run test case charset_gbk success! 
>>>>>> cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:48 CST 2024] <<<<<< run test case foreign_key success! >>>>>> table kafka_big_messages.test exists table row_format.finish_mark not exists for 8-th check, retry later check diff failed 1-th time, retry later table row_format.finish_mark not exists for 9-th check, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/open_protocol_handle_key_only/run.sh: line 1: 2969 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1 \033[0;36m<<< Run all test success >>>\033[0m table row_format.finish_mark not exists for 10-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:53 CST 2024] <<<<<< run test case kafka_big_messages success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_reconstruct/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/changefeed_reconstruct Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table row_format.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... Verifying downstream PD is started... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:56 CST 2024] <<<<<< run test case row_format success! >>>>>> Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:48:52 CST 2024] <<<<<< run test case kv_client_stream_reconnect success! 
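The "Killed cdc_kafka_consumer" message during cleanup shows how these cases consume the sink: a cdc_kafka_consumer process relays messages from the Kafka topic into a downstream MySQL/TiDB so the diff check can compare upstream and downstream. A hedged sketch of the launch (the flag set mirrors the killed command above; SINK_URI, UP_TIDB_HOST/PORT, and CUR are the harness variables referenced there):

# Sketch: relay the Kafka topic into the downstream database for the diff check
# (flag names mirror the killed command above; SINK_URI/UP_TIDB_*/CUR come from the harness)
cdc_kafka_consumer \
    --upstream-uri "$SINK_URI" \
    --downstream-uri "mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" \
    --upstream-tidb-dsn "root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" \
    --config "$CUR/conf/changefeed.toml" >consumer.log 2>&1 &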
>>>>>> table sink_retry.finish_mark_1 exists check diff successfully ***************** properties ***************** "recordcount"="10" "operationcount"="0" "dotransactions"="false" "scanproportion"="0" "readproportion"="0" "requestdistribution"="uniform" "mysql.port"="4000" "updateproportion"="0" "workload"="core" "insertproportion"="0" "mysql.host"="127.0.0.1" "mysql.user"="root" "readallfields"="true" "threadcount"="2" "mysql.db"="sink_retry" ********************************************** Run finished, takes 4.370212ms INSERT - Takes(s): 0.0, Count: 10, OPS: 3916.3, Avg(us): 797, Min(us): 473, Max(us): 1886, 95th(us): 2000, 99th(us): 2000 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 7.80 secs (477534241 bytes/sec) [Pipeline] { [Pipeline] cache Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc/run.sh using Sink-Type: kafka... <<================= VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2b93800017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:4386, start at 2024-05-04 16:49:04.251320181 +0800 CST m=+5.132264597 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:04.259 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
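The properties banner and the "Run finished ... INSERT" summary above are produced by go-ycsb loading a small workload into the upstream sink_retry database before replication is verified. A hedged sketch of the equivalent invocation (property values are copied from the banner; the command-line form is an assumption):

# Sketch: load 10 rows into sink_retry with go-ycsb (values from the banner; command form is an assumption)
cat > /tmp/sink_retry.properties <<'EOF'
recordcount=10
operationcount=0
workload=core
insertproportion=0
readproportion=0
updateproportion=0
scanproportion=0
requestdistribution=uniform
threadcount=2
mysql.host=127.0.0.1
mysql.port=4000
mysql.user=root
mysql.db=sink_retry
EOF
go-ycsb load mysql -P /tmp/sink_retry.properties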
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:04.224 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:04.224 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2b93800017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:4386, start at 2024-05-04 16:49:04.251320181 +0800 CST m=+5.132264597 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:04.259 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:04.224 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:04.224 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2b95cc0005 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:4461, start at 2024-05-04 16:49:04.374797026 +0800 CST m=+5.203708712 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:04.381 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:04.371 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:04.371 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_manager/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/ddl_manager Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying upstream PD is started... [Sat May 4 16:49:07 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.58505852.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver1 --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > start tidb cluster in /tmp/tidb_cdc_test/cdc Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:10 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f054ae52-890a-42be-9b9d-d2bc6b13c0a2 {"id":"f054ae52-890a-42be-9b9d-d2bc6b13c0a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2dfc4 f054ae52-890a-42be-9b9d-d2bc6b13c0a2 /tidb/cdc/default/default/upstream/7365063763521348468 {"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f054ae52-890a-42be-9b9d-d2bc6b13c0a2 {"id":"f054ae52-890a-42be-9b9d-d2bc6b13c0a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2dfc4 f054ae52-890a-42be-9b9d-d2bc6b13c0a2 /tidb/cdc/default/default/upstream/7365063763521348468 {"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f054ae52-890a-42be-9b9d-d2bc6b13c0a2 {"id":"f054ae52-890a-42be-9b9d-d2bc6b13c0a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812547} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2dfc4 f054ae52-890a-42be-9b9d-d2bc6b13c0a2 /tidb/cdc/default/default/upstream/7365063763521348468 {"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:49:10 CST 2024] <<<<<< START kafka consumer in changefeed_reconstruct case >>>>>> Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ***************** properties ***************** "readallfields"="true" "workload"="core" "updateproportion"="0" "insertproportion"="0" "operationcount"="0" "readproportion"="0" "threadcount"="4" "recordcount"="50" "mysql.host"="127.0.0.1" "mysql.port"="4000" "dotransactions"="false" "requestdistribution"="uniform" "scanproportion"="0" "mysql.db"="changefeed_reconstruct" "mysql.user"="root" ********************************************** Run finished, takes 17.27931ms INSERT - Takes(s): 0.0, Count: 47, OPS: 3605.2, Avg(us): 1293, Min(us): 845, Max(us): 4156, 95th(us): 5000, 99th(us): 5000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later Starting Upstream TiDB... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/kafka_compression Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
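Editor's note: the "properties" block and the "Run finished ... INSERT" summary above are the workload-seeding step: the harness loads 50 rows into changefeed_reconstruct.usertable before the sink is checked. A hedged reconstruction of an equivalent ad-hoc invocation, assuming a go-ycsb binary on PATH and using a property file name invented for illustration:

    # Hypothetical reconstruction of the seeding step shown above.
    cat > /tmp/ycsb-changefeed_reconstruct.properties <<'EOF'
    workload=core
    recordcount=50
    operationcount=0
    threadcount=4
    readallfields=true
    readproportion=0
    updateproportion=0
    insertproportion=0
    scanproportion=0
    requestdistribution=uniform
    dotransactions=false
    mysql.host=127.0.0.1
    mysql.port=4000
    mysql.db=changefeed_reconstruct
    mysql.user=root
    EOF
    # "load" runs only the insert phase, producing the usertable rows the test later checks.
    go-ycsb load mysql -P /tmp/ycsb-changefeed_reconstruct.properties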
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table changefeed_reconstruct.usertable exists check diff failed 1-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c2bf00006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:4089, start at 2024-05-04 16:49:13.987629296 +0800 CST m=+5.169323019 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:13.996 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-16:49:13.980 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:13.980 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c2bf00006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:4089, start at 2024-05-04 16:49:13.987629296 +0800 CST m=+5.169323019 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:13.996 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:13.980 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:13.980 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c2bd00008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:4176, start at 2024-05-04 16:49:13.980352651 +0800 CST m=+5.111506987 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:13.987 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:13.972 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:13.972 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
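Editor's note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above come from TiDB's mysql.tidb bookkeeping table; the harness reads it to confirm the cluster bootstrapped and the GC worker is running. The same information can be pulled manually once TiDB answers on port 4000 (a sketch; host and port are taken from the log):

    # Query the bootstrap/GC bookkeeping table printed above.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb \
       WHERE VARIABLE_NAME LIKE 'tikv_gc%' OR VARIABLE_NAME IN ('bootstrapped', 'tidb_server_version');"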
Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } wait process 5855 exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process 5855 exit for 2-th time... /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5855) - No such process wait process 5855 exit for 3-th time... process 5855 already exit check_no_capture http://127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully [Sat May 4 16:49:18 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.61746176.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver2 --cluster-id default --addr 127.0.0.1:8300 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c6ed00014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:4207, start at 2024-05-04 16:49:18.294567941 +0800 CST m=+5.162840605 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:18.302 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:18.260 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:18.260 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c6ed00014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:4207, start at 2024-05-04 16:49:18.294567941 +0800 CST m=+5.162840605 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:18.302 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:18.260 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:18.260 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c6eb00006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:4286, start at 2024-05-04 16:49:18.257210826 +0800 CST m=+5.078591726 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:18.263 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:18.252 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:18.252 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
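Editor's note: the kill_cdc_pid trace above ("wait process 5855 exit for N-th time... process 5855 already exit") kills a capture and loops until the process is gone before restarting the cdc server. A minimal sketch of that wait; the retry count and sleep interval are assumptions, not the harness defaults:

    # Sketch of waiting for a killed cdc process to disappear (pid and limits illustrative).
    wait_process_exit() {
        local pid=$1
        local i
        for i in $(seq 1 30); do
            if ! kill -0 "$pid" 2>/dev/null; then
                echo "process $pid already exit"
                return 0
            fi
            echo "wait process $pid exit for $i-th time..."
            sleep 1
        done
        echo "process $pid still running after retries" >&2
        return 1
    }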
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5619.out cli tso query --pd=http://127.0.0.1:2379 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 18.41 secs (202405160 bytes/sec) [Pipeline] { [Pipeline] cache ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449527823592325121 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527823592325121 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:49:21 CST 2024] <<<<<< START cdc server in ddl_puller_lag case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.56665668.out server --log-file /tmp/tidb_cdc_test/ddl_puller_lag/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_puller_lag/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [Sat May 4 16:49:21 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.56255627.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
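Editor's note: the `+ tso=...` / `+ awk -F ' ' '{print $1}'` trace above captures the output of `cdc cli tso query` and keeps only the first field, because the coverage-instrumented cdc.test binary appends a "PASS coverage: ..." line to stdout. A sketch of that extraction (variable names are illustrative):

    # Query the current TSO from PD; the coverage-built binary also prints a PASS line.
    tso=$(cdc.test cli tso query --pd=http://127.0.0.1:2379)
    # Unquoted echo flattens the output to one line, so awk's $1 is the timestamp itself.
    start_ts=$(echo $tso | awk -F ' ' '{print $1}')
    echo "using start-ts ${start_ts}"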
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:21 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {UpstreamID:7365063763521348468 Namespace:default ID:50f4c1c0-44a6-4005-b5b4-329a086c5c52 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:10.761800856 +0800 CST StartTs:449527821270515717 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003216b40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527821296730117} {CheckpointTs:449527822725414916 MinTableBarrierTs:449527824036134916 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527822725414916, checkpointTs: 449527822725414916, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc {"id":"feee1c27-b1a9-4746-9d43-8d2e2ec5eddc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812558} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2e071 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc /tidb/cdc/default/default/changefeed/info/50f4c1c0-44a6-4005-b5b4-329a086c5c52 
{"upstream-id":7365063763521348468,"namespace":"default","changefeed-id":"50f4c1c0-44a6-4005-b5b4-329a086c5c52","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:10.761800856+08:00","start-ts":449527821270515717,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527821296730117} /tidb/cdc/default/default/changefeed/status/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":449527822725414916,"min-table-barrier-ts":449527824036134916,"admin-job-type":0} /tidb/cdc/default/default/task/position/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063763521348468 {"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {UpstreamID:7365063763521348468 Namespace:default ID:50f4c1c0-44a6-4005-b5b4-329a086c5c52 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:10.761800856 +0800 CST StartTs:449527821270515717 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003216b40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527821296730117} {CheckpointTs:449527822725414916 MinTableBarrierTs:449527824036134916 AdminJobType:noop} span: 
{table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527822725414916, checkpointTs: 449527822725414916, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc {"id":"feee1c27-b1a9-4746-9d43-8d2e2ec5eddc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812558} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2e071 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc /tidb/cdc/default/default/changefeed/info/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"upstream-id":7365063763521348468,"namespace":"default","changefeed-id":"50f4c1c0-44a6-4005-b5b4-329a086c5c52","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:10.761800856+08:00","start-ts":449527821270515717,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527821296730117} /tidb/cdc/default/default/changefeed/status/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":449527822725414916,"min-table-barrier-ts":449527824036134916,"admin-job-type":0} /tidb/cdc/default/default/task/position/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":n+ grep -q 'failed to get info:' ull} /tidb/cdc/default/default/upstream/7365063763521348468 
{"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {UpstreamID:7365063763521348468 Namespace:default ID:50f4c1c0-44a6-4005-b5b4-329a086c5c52 SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:10.761800856 +0800 CST StartTs:449527821270515717 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003216b40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527821296730117} {CheckpointTs:449527822725414916 MinTableBarrierTs:449527824036134916 AdminJobType:noop} span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527822725414916, checkpointTs: 449527822725414916, state: Preparing *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc {"id":"feee1c27-b1a9-4746-9d43-8d2e2ec5eddc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812558} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cac2e071 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc /tidb/cdc/default/default/changefeed/info/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"upstream-id":7365063763521348468,"namespace":"default","changefeed-id":"50f4c1c0-44a6-4005-b5b4-329a086c5c52","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-2970?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:10.761800856+08:00","start-ts":449527821270515717,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integr
ity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527821296730117} /tidb/cdc/default/default/changefeed/status/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":449527822725414916,"min-table-barrier-ts":449527824036134916,"admin-job-type":0} /tidb/cdc/default/default/task/position/feee1c27-b1a9-4746-9d43-8d2e2ec5eddc/50f4c1c0-44a6-4005-b5b4-329a086c5c52 {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063763521348468 {"id":7365063763521348468,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1 | grep id ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c9f700014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:4453, start at 2024-05-04 16:49:21.410932472 +0800 CST m=+5.097320913 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:21.417 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:21.422 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:21.422 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c9f700014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:4453, start at 2024-05-04 16:49:21.410932472 +0800 CST m=+5.097320913 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:21.417 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:21.422 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:21.422 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2ca0240016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-kjzln-hfb2r, pid:4537, start at 2024-05-04 16:49:21.440689637 +0800 CST m=+5.076720924 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:21.449 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:21.417 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:21.417 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
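Editor's note: the repeated "ERROR 2003 (HY000): Can't connect to MySQL server" lines are expected noise: the harness probes the TiDB port in a loop and only reads mysql.tidb once a connection succeeds. A minimal sketch of that probe, with host and port from the log and the retry budget as an assumption:

    # Poll upstream TiDB until the MySQL protocol port accepts connections.
    for i in $(seq 1 60); do
        if mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1;' >/dev/null 2>&1; then
            echo "TiDB is up after $i attempts"
            break
        fi
        sleep 1
    done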
Logging trace to /tmp/tidb_cdc_test/cdc/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/cdc/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } "id": "feee1c27-b1a9-4746-9d43-8d2e2ec5eddc", "cluster-id": "default" run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2caa800013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:4132, start at 2024-05-04 16:49:22.109270704 +0800 CST m=+5.441589376 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:22.116 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:22.080 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:22.080 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) capture_id: feee1c27-b1a9-4746-9d43-8d2e2ec5eddc check_processor_table_count http://127.0.0.1:2379 50f4c1c0-44a6-4005-b5b4-329a086c5c52 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc 1 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb {"id":"150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cae0faf0 150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb /tidb/cdc/default/default/upstream/7365063802775080429 {"id":7365063802775080429,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb {"id":"150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cae0faf0 150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb /tidb/cdc/default/default/upstream/7365063802775080429 {"id":7365063802775080429,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb {"id":"150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cae0faf0 150a59a5-6f9e-483e-99d7-4b3fa9f7b6cb /tidb/cdc/default/default/upstream/7365063802775080429 {"id":7365063802775080429,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5726.out cli changefeed create --start-ts=449527823592325121 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-27399?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760' run task successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:24 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4c13407-9337-40a6-8864-7a046c81a369 {"id":"b4c13407-9337-40a6-8864-7a046c81a369","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b1ca b4c13407-9337-40a6-8864-7a046c81a369 /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4c13407-9337-40a6-8864-7a046c81a369 {"id":"b4c13407-9337-40a6-8864-7a046c81a369","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b1ca b4c13407-9337-40a6-8864-7a046c81a369 /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4c13407-9337-40a6-8864-7a046c81a369 {"id":"b4c13407-9337-40a6-8864-7a046c81a369","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812561} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b1ca b4c13407-9337-40a6-8864-7a046c81a369 /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.cli.5688.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-manager [Sat May 4 16:49:24 CST 2024] <<<<<< START cdc server in cdc case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test 
-test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.59275929.out server --log-file /tmp/tidb_cdc_test/cdc/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! ID: 0b159442-eb1d-4788-8c71-39dbe90bb13f Info: {"upstream_id":7365063802775080429,"namespace":"default","id":"0b159442-eb1d-4788-8c71-39dbe90bb13f","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-27399?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=ddl_puller_lag\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:49:24.688238805+08:00","start_ts":449527823592325121,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527823592325121,"checkpoint_ts":449527823592325121,"checkpoint_time":"2024-05-04 16:49:19.480"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check_processor_table_count http://127.0.0.1:2379 50f4c1c0-44a6-4005-b5b4-329a086c5c52 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc 0 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2caa800013 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:4132, start at 2024-05-04 16:49:22.109270704 +0800 CST m=+5.441589376 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:22.116 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:22.080 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:22.080 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2cabec0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:4208, start at 2024-05-04 16:49:22.193684915 +0800 CST m=+5.474966367 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:22.200 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:22.171 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:22.171 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
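Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows dumped above (bootstrapped, tidb_server_version, tikv_gc_* and friends) are TiDB's bootstrap and GC metadata, which live in the mysql.tidb system table; the harness reads them to confirm each upstream/downstream TiDB has finished bootstrapping before CDC is started. A minimal readiness probe along those lines is sketched below; it is a hypothetical helper, not the repo's _utils script, and the host/port 127.0.0.1:4000 and the mysql client invocation are assumptions.

# Hypothetical probe: wait until TiDB reports bootstrapped=True in mysql.tidb
# (assumed upstream address 127.0.0.1:4000; not the repo's actual helper).
for i in $(seq 1 60); do
    mysql -h 127.0.0.1 -P 4000 -u root -e \
        'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' 2>/dev/null \
        | grep -q 'bootstrapped.*True' && break
    sleep 1
done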
Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Create changefeed successfully! ID: ddl-manager Info: {"upstream_id":7365063820900411702,"namespace":"default","id":"ddl-manager","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:49:24.968335097+08:00","start_ts":449527824989814788,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527824989814788,"checkpoint_ts":449527824989814788,"checkpoint_time":"2024-05-04 16:49:24.811"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
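The `+ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret` / `+ grep -q 'etcd info'` trace that repeats throughout this log is the harness waiting for a freshly started cdc server to become ready: it polls the /debug/info endpoint with HTTP basic auth until the response contains the etcd metadata dump, retrying up to 50 times with a 3-second sleep and treating "failed to get info:" or connection refused as "not ready yet". Condensed into a standalone sketch (variable names are illustrative; limits mirror the trace above, not a verbatim copy of the repo's helper):

# Hypothetical condensed form of the readiness loop seen in the shell trace.
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
        --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server not ready yet"          # server up but still initializing
    elif echo "$res" | grep -q 'etcd info'; then
        break                                    # capture registered, server is ready
    fi
    [ "$i" -eq 50 ] && { echo "cdc server failed to start"; exit 1; }
    sleep 3
done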
table count 1 does equal to expected count 0 run task failed 1-th time, retry later + set +x [Sat May 4 16:49:26 CST 2024] <<<<<< START kafka consumer in ddl_puller_lag case >>>>>> + set +x [Sat May 4 16:49:26 CST 2024] <<<<<< START kafka consumer in ddl_manager case >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2cd4d00003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:5746, start at 2024-05-04 16:49:24.788805041 +0800 CST m=+8.597824170 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:24.794 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:24.788 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:24.788 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2cd4d00003 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:5746, start at 2024-05-04 16:49:24.788805041 +0800 CST m=+8.597824170 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:24.794 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:24.788 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:24.788 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2c9ed40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:5831, start at 2024-05-04 16:49:21.360040449 +0800 CST m=+5.107945080 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:21.366 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:21.333 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:21.333 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5663.out cli tso query --pd=http://127.0.0.1:2379 check_processor_table_count http://127.0.0.1:2379 50f4c1c0-44a6-4005-b5b4-329a086c5c52 feee1c27-b1a9-4746-9d43-8d2e2ec5eddc 0 run task successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:27 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/01db710d-6fb7-4657-ae8d-ae2ddfbe361c {"id":"01db710d-6fb7-4657-ae8d-ae2ddfbe361c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812564} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cafc49c5 01db710d-6fb7-4657-ae8d-ae2ddfbe361c /tidb/cdc/default/default/upstream/7365063837197787033 {"id":7365063837197787033,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/01db710d-6fb7-4657-ae8d-ae2ddfbe361c {"id":"01db710d-6fb7-4657-ae8d-ae2ddfbe361c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812564} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cafc49c5 01db710d-6fb7-4657-ae8d-ae2ddfbe361c /tidb/cdc/default/default/upstream/7365063837197787033 {"id":7365063837197787033,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/01db710d-6fb7-4657-ae8d-ae2ddfbe361c {"id":"01db710d-6fb7-4657-ae8d-ae2ddfbe361c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812564} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cafc49c5 01db710d-6fb7-4657-ae8d-ae2ddfbe361c /tidb/cdc/default/default/upstream/7365063837197787033 {"id":7365063837197787033,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.cli.5985.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cdc-test-29707?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/cdc/pulsar_test.toml Create changefeed successfully! 
ID: ec00c027-d237-4bd0-880d-7e68994bc4fa Info: {"upstream_id":7365063837197787033,"namespace":"default","id":"ec00c027-d237-4bd0-880d-7e68994bc4fa","sink_uri":"kafka://127.0.0.1:9092/ticdc-cdc-test-29707?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:49:28.181704419+08:00","start_ts":449527825831559174,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527825831559174,"checkpoint_ts":449527825831559174,"checkpoint_time":"2024-05-04 16:49:28.022"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Sat May 4 16:49:28 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.71787180.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ***************** properties ***************** "mysql.port"="4000" "mysql.db"="changefeed_reconstruct" "mysql.host"="127.0.0.1" "readproportion"="0" "dotransactions"="false" "recordcount"="50" "workload"="core" "readallfields"="true" "mysql.user"="root" "operationcount"="0" "scanproportion"="0" "requestdistribution"="uniform" "threadcount"="4" "updateproportion"="0" "insertproportion"="0" ********************************************** Run finished, takes 30.085278ms INSERT - Takes(s): 0.0, Count: 48, OPS: 3522.0, Avg(us): 2444, Min(us): 887, Max(us): 16530, 95th(us): 17000, 99th(us): 17000 table changefeed_reconstruct.usertable not exists for 1-th check, retry later + set +x + tso='449527825637048321 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527825637048321 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:49:28 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.56995701.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 16:49:29 CST 2024] <<<<<< START kafka consumer in cdc case >>>>>> go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 table sink_retry.finish_mark_2 exists check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:49:29 CST 2024] <<<<<< run test case sink_retry success! >>>>>> table changefeed_reconstruct.usertable exists check diff failed 1-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ef76c2b2-464e-4731-a41f-0f2a59c21a49 {"id":"ef76c2b2-464e-4731-a41f-0f2a59c21a49","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0588cf ef76c2b2-464e-4731-a41f-0f2a59c21a49 /tidb/cdc/default/default/upstream/7365063833962521616 {"id":7365063833962521616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ef76c2b2-464e-4731-a41f-0f2a59c21a49 {"id":"ef76c2b2-464e-4731-a41f-0f2a59c21a49","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0588cf ef76c2b2-464e-4731-a41f-0f2a59c21a49 /tidb/cdc/default/default/upstream/7365063833962521616 {"id":7365063833962521616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/ef76c2b2-464e-4731-a41f-0f2a59c21a49 {"id":"ef76c2b2-464e-4731-a41f-0f2a59c21a49","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812568} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0588cf ef76c2b2-464e-4731-a41f-0f2a59c21a49 /tidb/cdc/default/default/upstream/7365063833962521616 {"id":7365063833962521616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7239.out cli tso query --pd=http://127.0.0.1:2379 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading 
github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/tidwall/btree v1.7.0 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading 
github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: 
downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:31 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d05c000-9bd0-4288-a635-4f1ccf684554 {"id":"3d05c000-9bd0-4288-a635-4f1ccf684554","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812569} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0198d6 3d05c000-9bd0-4288-a635-4f1ccf684554 /tidb/cdc/default/default/upstream/7365063834957527197 {"id":7365063834957527197,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d05c000-9bd0-4288-a635-4f1ccf684554 {"id":"3d05c000-9bd0-4288-a635-4f1ccf684554","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812569} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0198d6 3d05c000-9bd0-4288-a635-4f1ccf684554 /tidb/cdc/default/default/upstream/7365063834957527197 {"id":7365063834957527197,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/3d05c000-9bd0-4288-a635-4f1ccf684554 {"id":"3d05c000-9bd0-4288-a635-4f1ccf684554","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812569} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb0198d6 3d05c000-9bd0-4288-a635-4f1ccf684554 /tidb/cdc/default/default/upstream/7365063834957527197 {"id":7365063834957527197,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5755.out cli changefeed create 
--start-ts=449527825637048321 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-26991?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: c4d848b3-35a1-4aa0-b2e6-b442d6734303 Info: {"upstream_id":7365063834957527197,"namespace":"default","id":"c4d848b3-35a1-4aa0-b2e6-b442d6734303","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-26991?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:49:32.294004674+08:00","start_ts":449527825637048321,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527825637048321,"checkpoint_ts":449527825637048321,"checkpoint_time":"2024-05-04 16:49:27.280"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 check diff successfully wait process cdc.test exit for 1-th time... + set +x + tso='449527826858115076 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527826858115076 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7282.out cli changefeed create --start-ts=449527826858115076 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip Create changefeed successfully! 
ID: gzip Info: {"upstream_id":7365063833962521616,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-05-04T16:49:33.825488913+08:00","start_ts":449527826858115076,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527826858115076,"checkpoint_ts":449527826858115076,"checkpoint_time":"2024-05-04 16:49:31.938"} PASS wait process cdc.test exit for 2-th time... coverage: 2.4% of statements in github.com/pingcap/tiflow/... + set +x [Sat May 4 16:49:33 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>> cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:49:34 CST 2024] <<<<<< run test case changefeed_reconstruct success! 
>>>>>> table drop_tables.c not exists for 1-th check, retry later + set +x [Sat May 4 16:49:35 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 13.74 secs (271205630 bytes/sec) [Pipeline] { [Pipeline] cache go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 16:49:33.784 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.820 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.944 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.955 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:34.920 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:34.930 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 16:49:33.784 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.820 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.944 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:33.955 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:34.920 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"] [2024/05/04 16:49:34.930 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]") table test.gzip_finish_mark not exists for 1-th check, retry later table drop_tables.c not exists for 2-th check, retry later table test.gzip_finish_mark not exists for 2-th check, retry later table drop_tables.c not exists for 3-th check, retry later table test.gzip_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7417.out cli changefeed pause -c gzip table drop_tables.c not exists for 4-th check, retry later PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_error/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7459.out cli changefeed remove -c gzip table drop_tables.c not exists for 5-th check, retry later Changefeed remove successfully. 
ID: gzip CheckpointTs: 449527829033648133 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... wait process 5630 exit for 1-th time... wait process 5630 exit for 2-th time... wait process 5630 exit for 3-th time... wait process 5630 exit for 4-th time... table drop_tables.c exists check diff successfully /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5630) - No such process wait process 5630 exit for 5-th time... process 5630 already exit [Sat May 4 16:49:44 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.57995801.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7497.out cli tso query --pd=http://127.0.0.1:2379 wait process cdc.test exit for 1-th time... start tidb cluster in /tmp/tidb_cdc_test/changefeed_error Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:49:46 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>> + set +x + tso='449527830279094274 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527830279094274 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7534.out cli changefeed create --start-ts=449527830279094274 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy Create changefeed successfully! 
ID: snappy Info: {"upstream_id":7365063833962521616,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-05-04T16:49:46.885136519+08:00","start_ts":449527830279094274,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527830279094274,"checkpoint_ts":449527830279094274,"checkpoint_time":"2024-05-04 16:49:44.988"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > + set +x [Sat May 4 16:49:48 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 16:49:46.834 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.881 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.971 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.981 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:47.970 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:47.978 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 16:49:46.834 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.881 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.971 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:46.981 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:47.970 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"] [2024/05/04 16:49:47.978 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]") table test.snappy_finish_mark not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_capture/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
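The repeated `kafka_compression/run.sh: line 22: [[: ... syntax error: operand expected` messages above (seen for both the gzip and the snappy changefeed) suggest that the raw output of a grep over cdc.log, i.e. the full matching "Kafka producer uses ... compression algorithm" lines, is substituted into a numeric [[ ... ]] comparison, so bash tries to evaluate the log text itself as an arithmetic operand. A defensive way to write such a check is to count matches with grep -c and compare the number; the sketch below is hypothetical and not the actual run.sh, with the expected count and algorithm name as illustrative assumptions (the log path matches the --log-file used earlier in this log):

# Hypothetical check: count compression-algorithm log lines instead of
# substituting the raw grep output into a numeric test.
algorithm="gzip"
count=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" \
    /tmp/tidb_cdc_test/kafka_compression/cdc.log || true)   # grep -c exits 1 on zero matches
if [[ "$count" -ge 1 ]]; then
    echo "found ${count} ${algorithm} compression log line(s)"
else
    echo "no ${algorithm} compression log line found"
    exit 1
fi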
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 11.13 secs (334770153 bytes/sec) [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:50 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 {"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825815568403,"min-table-barrier-ts":449527825815568403,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 
{"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825815568403,"min-table-barrier-ts":449527825815568403,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 + grep -q 'failed to get info:' {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 
16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 {"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825815568403,"min-table-barrier-ts":449527825815568403,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 
{"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:49:50 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.58635865.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] sh start tidb cluster in /tmp/tidb_cdc_test/multi_capture Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] sh Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh table test.snappy_finish_mark not exists for 2-th check, retry later + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... 
+ nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] sh + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 [Pipeline] withEnv [Pipeline] { [Pipeline] withEnv [Pipeline] { [Pipeline] container [Pipeline] { [Pipeline] container [Pipeline] { < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:49:52 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 {"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager 
{"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825946640412,"min-table-barrier-ts":449527825946640412,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 
{"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825946640412,"min-table-barrier-ts":449527825946640412,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 {"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-manager {UpstreamID:7365063820900411702 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 
16:49:24.968335097 +0800 CST StartTs:449527824989814788 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00327fd40 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527825029136386} {CheckpointTs:449527825815568403 MinTableBarrierTs:449527825815568403 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/420d4311-1ba3-47e6-bff7-e4ad3dd02de1 {"id":"420d4311-1ba3-47e6-bff7-e4ad3dd02de1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812585} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42caf1b4b5 420d4311-1ba3-47e6-bff7-e4ad3dd02de1 /tidb/cdc/default/default/changefeed/info/ddl-manager {"upstream-id":7365063820900411702,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-18915?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:49:24.968335097+08:00","start-ts":449527824989814788,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527825029136386} /tidb/cdc/default/default/changefeed/status/ddl-manager {"checkpoint-ts":449527825946640412,"min-table-barrier-ts":449527825946640412,"admin-job-type":0} /tidb/cdc/default/default/task/position/420d4311-1ba3-47e6-bff7-e4ad3dd02de1/ddl-manager {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365063820900411702 + grep -q 'etcd info' 
{"id":7365063820900411702,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x table ddl_manager.finish_mark not exists for 1-th check, retry later table test.snappy_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7665.out cli changefeed pause -c snappy ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 2-th check, retry later + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7699.out cli changefeed remove -c snappy ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Changefeed remove successfully. ID: snappy CheckpointTs: 449527831288086556 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 3-th check, retry later + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7737.out cli tso query --pd=http://127.0.0.1:2379 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2ebfdc0014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:4308, start at 2024-05-04 16:49:56.260689908 +0800 CST m=+5.107710784 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:56.267 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:56.266 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:56.266 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2ebfdc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:4308, start at 2024-05-04 16:49:56.260689908 +0800 CST m=+5.107710784 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:56.267 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:56.266 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:56.266 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2ec0940014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:4397, start at 2024-05-04 16:49:56.29173912 +0800 CST m=+5.084524114 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:51:56.299 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:49:56.261 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:39:56.261 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x + tso='449527833647644675 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527833647644675 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7773.out cli changefeed create --start-ts=449527833647644675 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4 table ddl_manager.finish_mark not exists for 4-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5728.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] } Create changefeed successfully! 
ID: lz4 Info: {"upstream_id":7365063833962521616,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-05-04T16:49:59.724969678+08:00","start_ts":449527833647644675,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527833647644675,"checkpoint_ts":449527833647644675,"checkpoint_time":"2024-05-04 16:49:57.838"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] withEnv [Pipeline] { ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 5-th check, retry later + set +x [Sat May 4 16:50:01 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> + set +x + tso='449527834153058306 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527834153058306 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "mysql.host"="127.0.0.1" "mysql.user"="root" "operationcount"="0" "recordcount"="20" "dotransactions"="false" "insertproportion"="0" "workload"="core" "scanproportion"="0" "readproportion"="0" "threadcount"="4" "mysql.db"="changefeed_error" "mysql.port"="4000" "readallfields"="true" "updateproportion"="0" "requestdistribution"="uniform" ********************************************** Run finished, takes 22.006583ms INSERT - Takes(s): 0.0, Count: 20, OPS: 3232.2, Avg(us): 4209, Min(us): 951, Max(us): 15800, 95th(us): 16000, 99th(us): 16000 [Sat May 4 16:50:01 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedNoRetryError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.57905792.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2f08b80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:7617, start at 2024-05-04 16:50:00.912764013 +0800 CST m=+5.127719845 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:00.919 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:00.878 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:00.878 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2f08b80014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:7617, start at 2024-05-04 16:50:00.912764013 +0800 CST m=+5.127719845 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:00.919 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:00.878 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:00.878 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2f0ba8000a Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:7696, start at 2024-05-04 16:50:01.075712299 +0800 CST m=+5.235595319 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:01.081 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:01.066 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:01.066 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
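The curl polling against http://127.0.0.1:8300/debug/info that brackets this output is how the tests wait for a freshly started cdc server: retry up to 50 times, treat a dump containing "etcd info" (and no failure marker) as ready, and sleep 3 seconds after a refused connection. A minimal sketch of that loop; the function name is hypothetical, while the URL, credentials and retry budget are the ones shown in the log.

    wait_cdc_ready() {
        local url="http://127.0.0.1:8300/debug/info"
        for ((i = 0; i <= 50; i++)); do
            # --user sends the HTTP basic auth pair used by this job (ticdc:ticdc_secret).
            res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>/dev/null)
            # Ready once the dump has the etcd section and no "failed to get info:" marker.
            if ! echo "$res" | grep -q 'failed to get info:' && echo "$res" | grep -q 'etcd info'; then
                return 0
            fi
            sleep 3
        done
        echo "cdc server did not become ready" >&2
        return 1
    }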
Logging trace to /tmp/tidb_cdc_test/multi_capture/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_capture/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 16:49:59.688 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.824 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.832 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:50:00.821 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:50:00.828 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 16:49:59.688 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.824 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:49:59.832 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:50:00.821 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"] [2024/05/04 16:50:00.828 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]") table test.lz4_finish_mark not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 6-th check, retry later [Pipeline] container [Pipeline] { [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] stage [Pipeline] { (Test) [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9081.out cli tso query --pd=http://127.0.0.1:2379 table test.lz4_finish_mark not exists for 2-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl 
-vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:04 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 {"id":"137ed278-4720-43a4-bd44-212d56a502c7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812601} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8657e8 137ed278-4720-43a4-bd44-212d56a502c7 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 {"id":"137ed278-4720-43a4-bd44-212d56a502c7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812601} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8657e8 137ed278-4720-43a4-bd44-212d56a502c7 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 {"id":"137ed278-4720-43a4-bd44-212d56a502c7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812601} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8657e8 137ed278-4720-43a4-bd44-212d56a502c7 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5851.out cli changefeed create --start-ts=449527834153058306 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error [Pipeline] timeout Timeout set to expire in 45 min [Pipeline] { Create changefeed successfully! 
ID: changefeed-error Info: {"upstream_id":7365063988073976772,"namespace":"default","id":"changefeed-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:05.020509386+08:00","start_ts":449527834153058306,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306,"checkpoint_time":"2024-05-04 16:49:59.766"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Pipeline] // cache [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] withCredentials Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN [Pipeline] // timeout table ddl_manager.finish_mark not exists for 7-th check, retry later [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] // timeout [Pipeline] } + set +x + tso='449527835374911489 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527835374911489 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Pipeline] // dir [Pipeline] } [Pipeline] { [Pipeline] { [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] // container [Pipeline] // container [Pipeline] // container [Pipeline] // container table test.lz4_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7915.out cli changefeed pause -c lz4 [Pipeline] // container [Pipeline] sh Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
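The curl transcripts in this stage (port 8300 above, 8301/8302 further down) are the set -x expansion of the harness's wait-for-ready loop: it polls the TiCDC /debug/info endpoint with basic auth, treats "failed to get info:" as "server reachable but capture not initialized", and only proceeds once the owner/processors/etcd dump ("etcd info") appears. A minimal sketch of that loop, reconstructed from the trace; the retry budget and endpoint match the log, the failure messages are illustrative:
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    # a reachable server that has not finished registering itself reports "failed to get info:"
    echo "$res" | grep -q 'failed to get info:' && echo 'cdc server not ready yet'
    # the owner/processors/etcd sections only show up once the capture is registered in etcd
    echo "$res" | grep -q 'etcd info' && break
    [ "$i" -eq 50 ] && { echo 'cdc server failed to come up in time'; exit 1; }
    sleep 3
done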
[Pipeline] sh + set +x [Sat May 4 16:50:06 CST 2024] <<<<<< START kafka consumer in changefeed_error case >>>>>> check_changefeed_state http://127.0.0.1:2379 changefeed-error failed [CDC:ErrStartTsBeforeGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=failed + error_msg='[CDC:ErrStartTsBeforeGC]' + tls_dir='[CDC:ErrStartTsBeforeGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:05.103855921+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at 449527834153058306" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:05.103855921+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at 449527834153058306" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:05.103855921+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrStartTsBeforeGC", "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at 449527834153058306" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449527834153058306, '"checkpoint_time":' '"2024-05-04' '16:49:59.766",' '"error":' '{' '"time":' '"2024-05-04T16:50:05.103855921+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at '449527834153058306"' '}' '}' + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449527834153058306, '"checkpoint_time":' '"2024-05-04' '16:49:59.766",' '"error":' '{' '"time":' '"2024-05-04T16:50:05.103855921+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at '449527834153058306"' '}' '}' + message='[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at 449527834153058306' + [[ ! 
[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449527834153058006 is earlier than or equal to GC safepoint at 449527834153058306 =~ \[CDC:ErrStartTsBeforeGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5970.out cli changefeed resume -c changefeed-error Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] sh ***************** properties ***************** "insertproportion"="0" "threadcount"="2" "workload"="core" "mysql.user"="root" "updateproportion"="0" "mysql.host"="127.0.0.1" "dotransactions"="false" "mysql.db"="multi_capture_1" "operationcount"="0" "requestdistribution"="uniform" "readproportion"="0" "readallfields"="true" "scanproportion"="0" "recordcount"="10" "mysql.port"="4000" ********************************************** Run finished, takes 9.041879ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2026.6, Avg(us): 1716, Min(us): 1039, Max(us): 3958, 95th(us): 4000, 99th(us): 4000 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G12 Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=b522bac8-e29c-47e6-87f6-8837ca201ad7 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home 
GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G12 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-bp09q GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-bp09q pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... <<================= PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... 
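check_changefeed_state in the changefeed_error trace above asserts that the changefeed created with a stale start-ts lands in the failed state with a CDC:ErrStartTsBeforeGC error. A sketch of the helper that the set -x output corresponds to; argument names follow the trace, the exact failure messages are illustrative:
check_changefeed_state() {
    endpoints=$1; changefeed_id=$2; expected_state=$3; error_msg=$4
    # -s prints a compact JSON summary of the changefeed
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "state $state does not equal expected $expected_state" && exit 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    # quoting the pattern makes =~ a literal substring match on the error code
    if [[ ! "$message" =~ "$error_msg" ]]; then
        echo "error message $message does not contain $error_msg" && exit 1
    fi
}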
table ddl_manager.finish_mark not exists for 8-th check, retry later [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G13 Run cases: tiflash region_merge common_1 PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=288e1ffb-3ebd-48b2-9f61-4d252f25495b BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G13 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k8pxt GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k8pxt pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] sh + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7949.out cli changefeed remove -c lz4 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G09 Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=0c2d13e6-6a00-43c5-a516-a44e698b1627 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G09 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-f9n2q GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-f9n2q pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
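Every parallel test pod in this build (G09, G12, G13, G14, ... in these traces) runs the same preamble before executing its slice of the suite: clear the shared scratch directory, then hand the sink type and group id to run_group.sh, which expands the group into its case list. As the traces above show, the per-pod step is essentially:
rm -rf /tmp/tidb_cdc_test
mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
# first argument selects the sink ("kafka" here), second the case group;
# e.g. G09 expands to: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status
./tests/integration_tests/run_group.sh kafka G09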
***************** properties ***************** "mysql.port"="4000" "readallfields"="true" "operationcount"="0" "threadcount"="2" "mysql.host"="127.0.0.1" "mysql.db"="multi_capture_2" "recordcount"="10" "workload"="core" "scanproportion"="0" "insertproportion"="0" "readproportion"="0" "updateproportion"="0" "dotransactions"="false" "requestdistribution"="uniform" "mysql.user"="root" ********************************************** Run finished, takes 8.831186ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2025.0, Avg(us): 1671, Min(us): 1012, Max(us): 3763, 95th(us): 4000, 99th(us): 4000 [Pipeline] // container [Pipeline] sh + set +x table changefeed_error.usertable not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] cache Changefeed remove successfully. ID: lz4 CheckpointTs: 449527834656636957 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... ***************** properties ***************** "requestdistribution"="uniform" "workload"="core" "operationcount"="0" "mysql.port"="4000" "recordcount"="10" "mysql.db"="multi_capture_3" "threadcount"="2" "updateproportion"="0" "dotransactions"="false" "scanproportion"="0" "mysql.host"="127.0.0.1" "readallfields"="true" "insertproportion"="0" "readproportion"="0" "mysql.user"="root" ********************************************** Run finished, takes 10.057442ms INSERT - Takes(s): 0.0, Count: 10, OPS: 1776.7, Avg(us): 1913, Min(us): 1189, Max(us): 4373, 95th(us): 5000, 99th(us): 5000 + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G14 Run cases: changefeed_finish force_replicate_table PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=05c52c7f-675d-4938-a84a-3554cbd0bc05 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ 
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G14 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-s880q GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl pingcap_tiflow_pull_cdc_integration_kafka_test_1836-s880q GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<================= table ddl_manager.finish_mark not exists for 9-th check, retry later The 1 times to try to start tidb cluster... 
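The "***************** properties *****************" banners and the "Run finished / INSERT - Takes(s): ..." summaries in this stage come from the workload loader seeding the upstream tables (multi_capture_1..multi_capture_4, changefeed_error.usertable, ...) before replication is verified. The output format matches go-ycsb; a hypothetical invocation that would produce a dump like the ones above (the workload file path and the exact flag set are assumptions, not taken from this log):
go-ycsb load mysql -P ./conf/workload \
    -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
    -p mysql.db=multi_capture_1 \
    -p recordcount=10 -p threadcount=2  # matches the record/thread counts in the banner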
+ rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G10 Run cases: default_value simple cdc_server_tips event_filter sql_mode PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=c6dbd8b8-a558-4c68-92c2-039f396af145 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G10 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 
JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-70rl0 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-70rl0 pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: kafka... <<================= + set +x + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7989.out cli tso query --pd=http://127.0.0.1:2379 ***************** properties ***************** "insertproportion"="0" "readallfields"="true" "mysql.port"="4000" "mysql.user"="root" "dotransactions"="false" "operationcount"="0" "scanproportion"="0" "readproportion"="0" "workload"="core" "recordcount"="10" "updateproportion"="0" "requestdistribution"="uniform" "mysql.db"="multi_capture_4" "threadcount"="2" "mysql.host"="127.0.0.1" ********************************************** Run finished, takes 8.13883ms INSERT - Takes(s): 0.0, Count: 10, OPS: 2285.5, Avg(us): 1533, Min(us): 852, Max(us): 3655, 95th(us): 4000, 99th(us): 4000 [Sat May 4 16:50:10 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + GO_FAILPOINTS= + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92159217.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 10-th check, retry later table changefeed_error.usertable exists check diff failed 1-th time, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G11 Run cases: resolve_lock move_table autorandom generate_column PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=bc0940ec-02cb-4f4e-9d21-3165b1498470 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G11 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tgv7p GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 pingcap_tiflow_pull_cdc_integration_kafka_test_1836-tgv7p GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... <<================= start tidb cluster in /tmp/tidb_cdc_test/tiflash Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... + set +x + tso='449527837015932931 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527837015932931 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8028.out cli changefeed create --start-ts=449527837015932931 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Create changefeed successfully! ID: zstd Info: {"upstream_id":7365063833962521616,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-05-04T16:50:12.620837732+08:00","start_ts":449527837015932931,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527837015932931,"checkpoint_ts":449527837015932931,"checkpoint_time":"2024-05-04 16:50:10.687"} PASS start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release coverage: 2.4% of statements in github.com/pingcap/tiflow/... Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 11-th check, retry later check diff successfully ***************** properties ***************** "workload"="core" "dotransactions"="false" "mysql.user"="root" "threadcount"="4" "updateproportion"="0" "recordcount"="20" "mysql.port"="4000" "readproportion"="0" "mysql.host"="127.0.0.1" "operationcount"="0" "scanproportion"="0" "readallfields"="true" "requestdistribution"="uniform" "insertproportion"="0" "mysql.db"="changefeed_error" ********************************************** Run finished, takes 4.877624ms INSERT - Takes(s): 0.0, Count: 20, OPS: 6069.3, Avg(us): 819, Min(us): 408, Max(us): 1895, 95th(us): 2000, 99th(us): 2000 check diff successfully {"id":"137ed278-4720-43a4-bd44-212d56a502c7","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812601} check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + [[ /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + echo 'check failed' check failed + exit 1 run task failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:13 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:50:13 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92719273.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 + (( i = 0 )) + (( i <= 50 )) ++ curl 
-vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 The 1 times to try to start tidb cluster... + set +x [Sat May 4 16:50:14 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>> Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Verifying downstream PD is started... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/04 16:50:12.587 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.617 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.723 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.731 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:13.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:13.729 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/05/04 16:50:12.587 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.617 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.723 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:12.731 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:13.721 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"] [2024/05/04 16:50:13.729 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]") table test.zstd_finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2fb6c00010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:7009, start at 2024-05-04 16:50:12.033635774 +0800 CST m=+5.224562325 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:12.042 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:12.016 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:12.016 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2fb6c00010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:7009, start at 2024-05-04 16:50:12.033635774 +0800 CST m=+5.224562325 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:12.042 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:12.016 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:12.016 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b2fb7840015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:7087, start at 2024-05-04 16:50:12.104043819 +0800 CST m=+5.243885031 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:12.113 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:12.115 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:12.115 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } The 1 times to try to start tidb cluster... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
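Editor's note: the "arg matches is ArgMatches { ... }" block above is the TiFlash proxy printing its parsed command line in Rust Debug form; each MatchedArg records a flag and its value, and the indices give the order in which the flags appeared. Read back into a plain invocation it corresponds roughly to the sketch below; the binary name and exact flag order are assumptions reconstructed from the indices, not copied from the test script.

  # Reconstructed from the ArgMatches vals above; illustrative, not the literal script line.
  tiflash-proxy \
    --engine-addr 127.0.0.1:9500 \
    --advertise-addr 127.0.0.1:9000 \
    --data-dir /tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy \
    --config /tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml \
    --engine-git-hash 8e170090fad91c94bef8d908e21c195c1d145b02 \
    --engine-version v8.2.0-alpha-16-g8e170090f \
    --engine-label tiflash \
    --pd-endpoints 127.0.0.1:2379 \
    --log-file /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log \
    --addr 127.0.0.1:9000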
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 12-th check, retry later check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + [[ /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + echo 'check failed' check failed + exit 1 run task failed 2-th time, retry later The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:16 GMT < Content-Length: 1271 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} 
/tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:50:16 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.93319333.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8303; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Starting Upstream TiKV... 
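Editor's note: the set -x trace above spells out the wait-for-ready loop that wraps each "cdc.test ... server" launch: build a curl command against the /debug/info endpoint with basic auth, poll up to 50 times, treat a response containing "failed to get info:" as an error, break as soon as the response contains "etcd info", and sleep 3 seconds between attempts. Condensed into one loop below; variable names follow the trace, while the handling on the two grep branches is inferred, since this excerpt only shows the happy path and the connection-refused path.

  # Poll the CDC debug endpoint until the capture has registered in etcd.
  curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL'
  for (( i = 0; i <= 50; i++ )); do
      res=$($curl_status_cmd)
      if echo "$res" | grep -q 'failed to get info:'; then
          echo "cdc server reported an error"; exit 1      # inferred branch
      fi
      if echo "$res" | grep -q 'etcd info'; then
          break                                            # server is ready
      fi
      [ $i -eq 50 ] && exit 1                              # retries exhausted
      sleep 3
  done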
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:50:17 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.84288430.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 13-th check, retry later table test.zstd_finish_mark not exists for 2-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/default_value Starting Upstream PD... 
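Editor's note: lines such as "table processor_stop_delay.t not exists for 1-th check, retry later" and "table ddl_manager.finish_mark not exists for 13-th check, retry later" come from a downstream polling check: the test keeps querying the downstream TiDB until the replicated table (often a dedicated finish-mark table) becomes visible, or a retry budget runs out. A hypothetical version of such a check is sketched below; the helper name, connection details, query, and retry budget are all assumptions rather than the actual helper in the tiflow repository.

  # Hypothetical sketch: wait until a table is visible on the downstream TiDB.
  wait_table_exists() {
      local schema=$1 table=$2
      for i in $(seq 1 60); do
          if mysql -h 127.0.0.1 -P 3306 -u root -N -e \
              "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='${schema}' AND table_name='${table}'" \
              | grep -qx 1; then
              echo "table ${schema}.${table} exists"
              return 0
          fi
          echo "table ${schema}.${table} not exists for ${i}-th check, retry later"
          sleep 2
      done
      return 1
  }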
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 7.61 secs (489957184 bytes/sec) [Pipeline] { [Pipeline] // withCredentials [Pipeline] } [Pipeline] container [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] cache table ddl_manager.finish_mark not exists for 14-th check, retry later table test.zstd_finish_mark not exists for 3-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8303 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8303 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:19 GMT < Content-Length: 1750 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/capture/c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a {"id":"c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a","address":"127.0.0.1:8303","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812616} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/__cdc_meta__/owner/22318f42cb9956a3 c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/capture/c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a {"id":"c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a","address":"127.0.0.1:8303","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812616} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef 
/tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/__cdc_meta__/owner/22318f42cb9956a3 c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/49659466-e2db-4491-9956-1e11a001fa2f {"id":"49659466-e2db-4491-9956-1e11a001fa2f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812613} /tidb/cdc/default/__cdc_meta__/capture/61941be6-71fb-45e5-8f03-d5ecbf72f2ef {"id":"61941be6-71fb-45e5-8f03-d5ecbf72f2ef","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812610} /tidb/cdc/default/__cdc_meta__/capture/c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a {"id":"c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a","address":"127.0.0.1:8303","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812616} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb99564d 61941be6-71fb-45e5-8f03-d5ecbf72f2ef /tidb/cdc/default/__cdc_meta__/owner/22318f42cb995682 49659466-e2db-4491-9956-1e11a001fa2f /tidb/cdc/default/__cdc_meta__/owner/22318f42cb9956a3 c6bf4f9a-db93-49f5-ba7f-3edba96ffe6a /tidb/cdc/default/default/upstream/7365064004519949792 {"id":7365064004519949792,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9390.out cli changefeed create --start-ts=449527835374911489 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-capture-test-28646?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8301 check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info=/tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + [[ /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 =~ capture ]] + echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7' capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/137ed278-4720-43a4-bd44-212d56a502c7 + echo 'check failed' check failed + exit 1 run task failed 3-th time, retry later start tidb cluster in /tmp/tidb_cdc_test/resolve_lock Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... 
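Editor's note: the check_etcd_meta_not_exist traces above (the failing attempts here and the passing run later in this log) show the helper's whole shape: list the keys under a prefix with etcdctl and fail while any of them still matches the given marker, for example a capture registration that has not yet been cleaned up. The sketch below is reassembled from the set -x output and adds nothing beyond what the trace shows; the outer "run task failed N-th time, retry later" wrapper that re-invokes it is not part of this function.

  check_etcd_meta_not_exist() {
      key_prefix=$1
      message=$2
      info=$(etcdctl get "$key_prefix" --prefix --keys-only)
      if [[ $info =~ $message ]]; then
          echo "$message contains in etcd $info"
          echo 'check failed'
          exit 1
      fi
      echo 'check pass'
      exit 0
  }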
Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... Create changefeed successfully! ID: 3719368b-26ac-48e6-8ff7-103992b9a036 Info: {"upstream_id":7365064004519949792,"namespace":"default","id":"3719368b-26ac-48e6-8ff7-103992b9a036","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-capture-test-28646?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:19.87943372+08:00","start_ts":449527835374911489,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527835374911489,"checkpoint_ts":449527835374911489,"checkpoint_time":"2024-05-04 16:50:04.427"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:20 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/97167583-de29-4159-819e-b0a8bc85cba2 {"id":"97167583-de29-4159-819e-b0a8bc85cba2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812617} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbc37acd 97167583-de29-4159-819e-b0a8bc85cba2 /tidb/cdc/default/default/upstream/7365064050602702324 {"id":7365064050602702324,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/97167583-de29-4159-819e-b0a8bc85cba2 {"id":"97167583-de29-4159-819e-b0a8bc85cba2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812617} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbc37acd 97167583-de29-4159-819e-b0a8bc85cba2 /tidb/cdc/default/default/upstream/7365064050602702324 {"id":7365064050602702324,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/97167583-de29-4159-819e-b0a8bc85cba2 {"id":"97167583-de29-4159-819e-b0a8bc85cba2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812617} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbc37acd 97167583-de29-4159-819e-b0a8bc85cba2 /tidb/cdc/default/default/upstream/7365064050602702324 {"id":7365064050602702324,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:50:20 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
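Editor's note: the changefeed created for the multi_capture case above shows how these tests address Kafka: the topic lives in the URI path and the encoder/broker settings ride along as query parameters (protocol=open-protocol, partition-num=4, kafka-version=2.4.1, max-message-bytes=10485760). The same invocation in a user-facing form looks like the sketch below; TOPIC and START_TS carry the concrete values from the trace, and the trace itself calls the coverage-instrumented cdc.test binary rather than a bare cdc.

  # Sketch of the changefeed creation seen above (values copied from the trace).
  TOPIC='ticdc-multi-capture-test-28646'
  START_TS=449527835374911489
  SINK_URI="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
  cdc cli changefeed create \
      --start-ts="${START_TS}" \
      --sink-uri="${SINK_URI}" \
      --server=127.0.0.1:8301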
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_manager.finish_mark not exists for 15-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3033280018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:1399, start at 2024-05-04 16:50:20.010618098 +0800 CST m=+5.241695888 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:20.018 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:19.978 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:19.978 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3033280018 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:1399, start at 2024-05-04 16:50:20.010618098 +0800 CST m=+5.241695888 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:20.018 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:19.978 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:19.978 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3034a40003 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:1474, start at 2024-05-04 16:50:20.077944184 +0800 CST m=+5.252771892 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:20.085 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:20.073 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:20.073 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.zstd_finish_mark exists check diff successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8167.out cli changefeed pause -c zstd Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table processor_stop_delay.t not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2800.out cli tso query --pd=http://127.0.0.1:2379 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
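Editor's note: before creating the tiflash-case changefeed, the script asks PD for a current TSO with "run_cdc_cli tso query --pd=http://127.0.0.1:2379"; a little further down, the captured output is trimmed with awk and its first field (449527840342802433) becomes the changefeed's --start-ts. A compact sketch of that pattern follows; the trailing "PASS coverage ..." text in the captured value comes from the coverage-instrumented test binary, which is why the awk step exists, and SINK_URI stands in for a sink URI like the ones shown elsewhere in this log.

  # Ask PD for a TSO and use its first field as the changefeed start point.
  tso=$(run_cdc_cli tso query --pd=http://127.0.0.1:2379)     # wrapper name taken from the trace
  start_ts=$(echo $tso | awk -F ' ' '{print $1}')             # unquoted echo flattens the coverage line
  cdc cli changefeed create --start-ts="$start_ts" --sink-uri="$SINK_URI"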
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8203.out cli changefeed remove -c zstd + set +x [Sat May 4 16:50:22 CST 2024] <<<<<< START kafka consumer in multi_capture case >>>>>> table multi_capture_1.usertable not exists for 1-th check, retry later Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table processor_stop_delay.t exists check diff failed 1-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3058980017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:1295, start at 2024-05-04 16:50:22.419606639 +0800 CST m=+5.060058831 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:22.426 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:22.424 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:22.424 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3058980017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:1295, start at 2024-05-04 16:50:22.419606639 +0800 CST m=+5.060058831 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:22.426 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:22.424 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:22.424 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b305ae40015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:1379, start at 2024-05-04 16:50:22.569949501 +0800 CST m=+5.152278400 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:22.576 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:22.571 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:22.571 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 16-th check, retry later Changefeed remove successfully. 
ID: zstd CheckpointTs: 449527839794921474 SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3071f80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:1400, start at 2024-05-04 16:50:24.029832128 +0800 CST m=+5.444445242 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:24.039 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:23.998 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:23.998 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449527840342802433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527840342802433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:50:24 CST 2024] <<<<<< START cdc server in tiflash case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.28342836.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table multi_capture_1.usertable not exists for 2-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x [Sat May 4 16:50:25 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28332835.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 17-th check, retry later wait process cdc.test exit for 1-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff successfully wait process cdc.test exit for 2-th time... check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture' + key_prefix=/tidb/cdc/default/__cdc_meta__/capture + message=capture ++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only + info= + [[ '' =~ capture ]] + echo 'check pass' check pass + exit 0 run task successfully check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 16:50:25 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.62206222.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 3-th time... 
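Editor's note: several servers in this log are started with GO_FAILPOINTS set (processorStopDelay=1*sleep(10000) earlier, InjectGcSafepointUpdateInterval=return(500) and NewChangefeedRetryError=return(true) here). These are pingcap/failpoint terms: a fully qualified failpoint name mapped to an action such as return(value) or sleep(milliseconds), optionally prefixed with a count like 1* that limits how many times it fires. To the best of my understanding of that syntax (multiple failpoints separated by semicolons; not re-verified against the failpoint library here), enabling two of them for one run would look like:

  # Illustrative only: two failpoints for a single cdc server run.
  # 1*sleep(10000) fires once and sleeps 10s; return(true) always takes effect.
  export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000);github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)'
  cdc.test server --log-level debug --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379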
process cdc.test already exit [Sat May 4 16:50:26 CST 2024] <<<<<< run test case kafka_compression success! >>>>>> VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3071f80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:1400, start at 2024-05-04 16:50:24.029832128 +0800 CST m=+5.444445242 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:24.039 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:23.998 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:23.998 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3072c80004 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:1489, start at 2024-05-04 16:50:24.054007037 +0800 CST m=+5.412410568 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:24.065 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:24.050 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:24.050 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
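Editor's note: the kafka_compression case that just reported success follows the pattern visible across this log for its zstd sub-case: a changefeed whose sink URI adds compression=zstd, a Kafka consumer started against that topic, a finish-mark table (test.zstd_finish_mark) polled on the downstream, a data diff, and finally a pause and remove of the changefeed. Condensed into the order of events seen here; the sink URI and changefeed id are taken from the remove output above, while the create step happened before this excerpt, so its exact flags are an assumption.

  # Order of events for the zstd sub-case, condensed from this log.
  SINK_URI='kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd'
  cdc cli changefeed create -c zstd --sink-uri="$SINK_URI"   # create flags assumed; not shown in this excerpt
  # ... run workload, wait for test.zstd_finish_mark downstream, check diff ...
  cdc cli changefeed pause  -c zstd
  cdc cli changefeed remove -c zstd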
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table multi_capture_1.usertable exists table multi_capture_2.usertable exists table multi_capture_3.usertable exists table multi_capture_4.usertable not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:50:21 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 1 [Sat May 4 16:50:21 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 2 [Sat May 4 16:50:21 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 3 table test.table1 not exists for 1-th check, retry later table test.table1 not exists for 2-th check, retry later table test.table1 exists table test.table2 exists table test.table3 exists check diff successfully table test.table10 not exists for 1-th check, retry later check diff failed 1-th time, retry later table ddl_manager.finish_mark not exists for 18-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:27 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 {"id":"4f50b39e-05da-4fe3-ac6c-20b6f1d44f25","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbea65d0 4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 /tidb/cdc/default/default/upstream/7365064084250504924 {"id":7365064084250504924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 {"id":"4f50b39e-05da-4fe3-ac6c-20b6f1d44f25","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbea65d0 4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 /tidb/cdc/default/default/upstream/7365064084250504924 {"id":7365064084250504924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 {"id":"4f50b39e-05da-4fe3-ac6c-20b6f1d44f25","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbea65d0 4f50b39e-05da-4fe3-ac6c-20b6f1d44f25 /tidb/cdc/default/default/upstream/7365064084250504924 {"id":7365064084250504924,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
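The curl trace above is the capture-readiness loop used after every cdc server start in this log: poll http://127.0.0.1:8300/debug/info with basic auth up to 50 times, treat a body containing "failed to get info:" as not ready, and break once the "etcd info" section (owner, processors, etcd keys) shows up. Condensed into a function; the name is mine, while the URL, credentials and markers are the ones in the trace:

  # Wait until the cdc server's /debug/info endpoint reports its etcd info.
  wait_cdc_ready() {
      for ((i = 0; i <= 50; i++)); do
          # -v goes to stderr, so only the response body lands in res, as in the trace.
          res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
          if ! echo "$res" | grep -q 'failed to get info:' && echo "$res" | grep -q 'etcd info'; then
              return 0
          fi
          sleep 3
      done
      echo "cdc server did not become ready" >&2
      return 1
  }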
ID: 5aabc8f5-b035-4ff8-96d9-ed57dcd7f636 Info: {"upstream_id":7365064084250504924,"namespace":"default","id":"5aabc8f5-b035-4ff8-96d9-ed57dcd7f636","sink_uri":"kafka://127.0.0.1:9092/ticdc-tiflash-test-28874?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:28.201123081+08:00","start_ts":449527840342802433,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527840342802433,"checkpoint_ts":449527840342802433,"checkpoint_time":"2024-05-04 16:50:23.378"} [Sat May 4 16:50:28 CST 2024] <<<<<< START kafka consumer in tiflash case >>>>>> table test.table10 exists table test.table20 exists check diff successfully < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:28 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6952127f-27ae-4ddc-afd1-4154bcdfa3d4 {"id":"6952127f-27ae-4ddc-afd1-4154bcdfa3d4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8658ce 6952127f-27ae-4ddc-afd1-4154bcdfa3d4 /tidb/cdc/default/default/changefeed/info/changefeed-error 
{"upstream-id":7365063988073976772,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:50:05.020509386+08:00","start-ts":449527834153058306,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T16:50:25.880752084+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527840995016707} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449527837416751106,"min-table-barrier-ts":449527837416751108,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6952127f-27ae-4ddc-afd1-4154bcdfa3d4 {"id":"6952127f-27ae-4ddc-afd1-4154bcdfa3d4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8658ce 6952127f-27ae-4ddc-afd1-4154bcdfa3d4 /tidb/cdc/default/default/changefeed/info/changefeed-error 
{"upstream-id":7365063988073976772,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:50:05.020509386+08:00","start-ts":449527834153058306,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T16:50:25.880752084+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527840995016707} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449527837416751106,"min-table-barrier-ts":449527837416751108,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6952127f-27ae-4ddc-afd1-4154bcdfa3d4 {"id":"6952127f-27ae-4ddc-afd1-4154bcdfa3d4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812625} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 3 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8658ce 6952127f-27ae-4ddc-afd1-4154bcdfa3d4 /tidb/cdc/default/default/changefeed/info/changefeed-error 
{"upstream-id":7365063988073976772,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:50:05.020509386+08:00","start-ts":449527834153058306,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-04T16:50:25.880752084+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527840995016707} /tidb/cdc/default/default/changefeed/status/changefeed-error {"checkpoint-ts":449527837416751106,"min-table-barrier-ts":449527837416751108,"admin-job-type":1} /tidb/cdc/default/default/upstream/7365063988073976772 + grep -q 'etcd info' {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error warning failpoint injected retriable error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error + expected_state=warning + error_msg=failpoint + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449527837416751106, "checkpoint_time": "2024-05-04 16:50:12.216", "error": { "time": "2024-05-04T16:50:25.880752084+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "warning", 
"checkpoint_tso": 449527837416751106, "checkpoint_time": "2024-05-04 16:50:12.216", "error": { "time": "2024-05-04T16:50:25.880752084+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error", "state": "warning", "checkpoint_tso": 449527837416751106, "checkpoint_time": "2024-05-04 16:50:12.216", "error": { "time": "2024-05-04T16:50:25.880752084+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrOwnerUnknown", "message": "failpoint injected retriable error" } } ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449527837416751106, '"checkpoint_time":' '"2024-05-04' '16:50:12.216",' '"error":' '{' '"time":' '"2024-05-04T16:50:25.880752084+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .state table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449527837416751106, '"checkpoint_time":' '"2024-05-04' '16:50:12.216",' '"error":' '{' '"time":' '"2024-05-04T16:50:25.880752084+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}' ++ jq -r .error.message + message='failpoint injected retriable error' + [[ ! failpoint injected retriable error =~ failpoint ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6326.out cli changefeed remove -c changefeed-error + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:28 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/39bf96eb-03f1-4cd1-946e-3baacadba4ca {"id":"39bf96eb-03f1-4cd1-946e-3baacadba4ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812626} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbecb9c5 39bf96eb-03f1-4cd1-946e-3baacadba4ca /tidb/cdc/default/default/upstream/7365064096786937151 {"id":7365064096786937151,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/39bf96eb-03f1-4cd1-946e-3baacadba4ca {"id":"39bf96eb-03f1-4cd1-946e-3baacadba4ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812626} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbecb9c5 39bf96eb-03f1-4cd1-946e-3baacadba4ca /tidb/cdc/default/default/upstream/7365064096786937151 {"id":7365064096786937151,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/39bf96eb-03f1-4cd1-946e-3baacadba4ca {"id":"39bf96eb-03f1-4cd1-946e-3baacadba4ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812626} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbecb9c5 39bf96eb-03f1-4cd1-946e-3baacadba4ca /tidb/cdc/default/default/upstream/7365064096786937151 {"id":7365064096786937151,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x table multi_capture_4.usertable exists check diff failed 1-th time, retry later [Sat May 4 16:50:29 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>> 0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 19-th check, retry later check diff failed 1-th time, retry later [Sat May 4 16:50:28 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user 
ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.28072809.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Changefeed remove successfully. ID: changefeed-error CheckpointTs: 449527837416751106 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b6d80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:1482, start at 2024-05-04 16:50:28.442781959 +0800 CST m=+5.194672561 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.450 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.406 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.406 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30cc4c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:1363, start at 2024-05-04 16:50:29.799501707 +0800 CST m=+5.924152294 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:29.805 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:29.779 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:29.779 +0800 All versions after safe point can be accessed. (DO NOT EDIT) check diff failed 2-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table cdc_tiflash_test.multi_data_type not exists for 2-th check, retry later + set +x check_no_changefeed 127.0.0.1:2379 parse error: Invalid numeric literal at line 1, column 6 run task successfully VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b2780013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2, pid:1428, start at 2024-05-04 16:50:28.151537349 +0800 CST m=+5.290041621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.158 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.126 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.126 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b2780013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2, pid:1428, start at 2024-05-04 16:50:28.151537349 +0800 CST m=+5.290041621 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.158 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.126 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.126 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b32c0014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-70rl0-5g8b2, pid:1505, start at 2024-05-04 16:50:28.21038779 +0800 CST m=+5.289266796 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.217 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.222 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.222 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 20-th check, retry later check diff successfully ***************** properties ***************** "dotransactions"="false" "threadcount"="2" "recordcount"="20" "mysql.user"="root" "readallfields"="true" "operationcount"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "updateproportion"="0" "scanproportion"="0" "mysql.port"="4000" "readproportion"="0" "mysql.db"="multi_capture_1" "insertproportion"="0" "workload"="core" 
********************************************** Run finished, takes 10.085578ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2261.3, Avg(us): 963, Min(us): 468, Max(us): 1617, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "dotransactions"="false" "recordcount"="20" "operationcount"="0" "mysql.host"="127.0.0.1" "mysql.port"="4000" "updateproportion"="0" "readproportion"="0" "threadcount"="2" "scanproportion"="0" "requestdistribution"="uniform" "mysql.user"="root" "workload"="core" "mysql.db"="multi_capture_2" "readallfields"="true" "insertproportion"="0" ********************************************** Run finished, takes 11.079579ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2017.9, Avg(us): 1053, Min(us): 548, Max(us): 1990, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "mysql.db"="multi_capture_3" "scanproportion"="0" "operationcount"="0" "updateproportion"="0" "dotransactions"="false" "mysql.port"="4000" "mysql.user"="root" "threadcount"="2" "insertproportion"="0" "readproportion"="0" "requestdistribution"="uniform" "recordcount"="20" "readallfields"="true" "workload"="core" "mysql.host"="127.0.0.1" ********************************************** Run finished, takes 10.705463ms INSERT - Takes(s): 0.0, Count: 20, OPS: 2168.7, Avg(us): 1028, Min(us): 492, Max(us): 1758, 95th(us): 2000, 99th(us): 2000 ***************** properties ***************** "mysql.port"="4000" "readproportion"="0" "mysql.db"="multi_capture_4" "mysql.host"="127.0.0.1" "mysql.user"="root" "operationcount"="0" "dotransactions"="false" "recordcount"="20" "insertproportion"="0" "requestdistribution"="uniform" "threadcount"="2" "workload"="core" "updateproportion"="0" "readallfields"="true" "scanproportion"="0" ********************************************** Run finished, takes 11.724065ms INSERT - Takes(s): 0.0, Count: 20, OPS: 1902.1, Avg(us): 999, Min(us): 516, Max(us): 2084, 95th(us): 2000, 99th(us): 3000 check diff failed 1-th time, retry later wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b6d80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:1482, start at 2024-05-04 16:50:28.442781959 +0800 CST m=+5.194672561 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.450 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.406 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.406 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. 
system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30b93c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:1562, start at 2024-05-04 16:50:28.584430019 +0800 CST m=+5.283116120 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:28.596 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:28.559 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:28.559 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 2-th time, retry later check diff failed 3-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
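The recurring "check diff failed N-th time, retry later" / "check diff successfully" lines, as just above, are a retrying upstream-versus-downstream data comparison. The tiflow integration suites normally drive sync_diff_inspector for this; the wrapper below is only a sketch of that retry pattern, and the config path is invented for illustration:

  # Retry a data diff between upstream and downstream until it passes.
  check_sync_diff() {
      local conf=$1   # e.g. /tmp/tidb_cdc_test/<case>/diff_config.toml (hypothetical path)
      for i in $(seq 1 30); do
          if sync_diff_inspector --config="$conf" >/dev/null 2>&1; then
              echo "check diff successfully"
              return 0
          fi
          echo "check diff failed $i-th time, retry later"
          sleep 3
      done
      return 1
  }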
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30cc4c0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:1363, start at 2024-05-04 16:50:29.799501707 +0800 CST m=+5.924152294 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:29.805 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:29.779 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:29.779 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b30c2100010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:1451, start at 2024-05-04 16:50:29.141221565 +0800 CST m=+5.206334165 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:29.148 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:29.124 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:29.124 +0800 All versions after safe point can be accessed. (DO NOT EDIT) wait process cdc.test exit for 2-th time... 
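The four "properties ... Run finished" workload blocks earlier in this stretch seed multi_capture_1 through multi_capture_4 with 20 rows each before the diff check (the INSERT counts in the Run finished lines confirm 20 rows per database). The property names (recordcount, operationcount, workload=core, mysql.host, ...) look like go-ycsb properties; assuming that loader, an equivalent run for the first database could be:

  # Seed 20 rows into multi_capture_1 with a go-ycsb core workload (go-ycsb and the
  # property file name are assumptions based on the property names in the log).
  go-ycsb load mysql -P workload.conf \
      -p workload=core -p recordcount=20 -p operationcount=0 -p threadcount=2 \
      -p readallfields=true -p requestdistribution=uniform -p dotransactions=false \
      -p readproportion=0 -p updateproportion=0 -p scanproportion=0 -p insertproportion=0 \
      -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root -p mysql.db=multi_capture_1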
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 10.23 secs (364297813 bytes/sec) [Pipeline] { < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:32 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/593c49ff-05e1-4689-ae45-abe8d63d7e3e {"id":"593c49ff-05e1-4689-ae45-abe8d63d7e3e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812629} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbf14ed1 593c49ff-05e1-4689-ae45-abe8d63d7e3e /tidb/cdc/default/default/upstream/7365064094549801561 {"id":7365064094549801561,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/593c49ff-05e1-4689-ae45-abe8d63d7e3e {"id":"593c49ff-05e1-4689-ae45-abe8d63d7e3e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812629} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbf14ed1 593c49ff-05e1-4689-ae45-abe8d63d7e3e /tidb/cdc/default/default/upstream/7365064094549801561 {"id":7365064094549801561,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/593c49ff-05e1-4689-ae45-abe8d63d7e3e {"id":"593c49ff-05e1-4689-ae45-abe8d63d7e3e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812629} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cbf14ed1 593c49ff-05e1-4689-ae45-abe8d63d7e3e /tidb/cdc/default/default/upstream/7365064094549801561 {"id":7365064094549801561,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Pipeline] sh Starting Upstream TiFlash... 
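The "etcd info" section in the dump above is a pretty-printed view of the keys TiCDC keeps under /tidb/cdc/default/: the capture registration, the meta-version, the owner campaign key and the upstream record. The same keys can be read straight from etcd, which is what helpers such as check_etcd_meta_not_exist (traced two chunks below) do with etcdctl; a small sketch:

  # List the CDC metadata keys that /debug/info pretty-prints.
  etcdctl get /tidb/cdc/default/ --prefix --keys-only
  # Or confirm the owner key is gone, as check_etcd_meta_not_exist does:
  if ! etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only | grep -q owner; then
      echo "check pass"
  fi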
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Sat May 4 16:50:32 CST 2024] <<<<<< START kafka consumer in changefeed_finish case >>>>>> [Pipeline] container [Pipeline] { [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow [Pipeline] { [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { [Pipeline] cache cdc.test: no process found wait process cdc.test exit for 3-th time... 
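The "table <db>.<table> not exists for N-th check, retry later" messages scattered through this log (the cdc_tiflash_test.multi_data_type check resolves just below) come from a poll that waits for replication to create the table downstream. A sketch of that kind of check, assuming a plain information_schema lookup; the host, port and retry budget are illustrative:

  # Poll the downstream until a replicated table shows up.
  check_table_exists() {
      local db=$1 tbl=$2
      for i in $(seq 1 60); do
          local n
          n=$(mysql -h 127.0.0.1 -P 3306 -u root -N -e \
              "SELECT COUNT(*) FROM information_schema.tables WHERE table_schema='$db' AND table_name='$tbl';")
          if [ "${n:-0}" -ge 1 ]; then
              echo "table $db.$tbl exists"
              return 0
          fi
          echo "table $db.$tbl not exists for $i-th check, retry later"
          sleep 2
      done
      return 1
  }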
process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 16:50:32 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.64246426.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 check diff failed 1-th time, retry later table cdc_tiflash_test.multi_data_type exists check diff successfully wait process cdc.test exit for 1-th time... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2904.out cli tso query --pd=http://127.0.0.1:2379 table ddl_manager.finish_mark not exists for 21-th check, retry later check diff successfully check_safepoint_forward http://127.0.0.1:2379 7365064096786937151 449527842701049865 449527841770438660 check diff failed 4-th time, retry later wait process cdc.test exit for 2-th time... check diff successfully wait process cdc.test exit for 1-th time... + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2899.out cli tso query --pd=http://127.0.0.1:2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2900.out cli tso query --pd=http://127.0.0.1:2379 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:50:34 CST 2024] <<<<<< run test case tiflash success! >>>>>> wait process cdc.test exit for 2-th time... 
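The changefeed_error restart above also shows how failure injection is wired in: GO_FAILPOINTS names the failpoint (here github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)) before cdc.test is launched, so the next DDL deliberately fails and the feed is expected to surface ErrExecDDLFailed. A trimmed sketch of that launch; the flags and paths are the ones in the trace, the coverage file name is simplified, and the backgrounding plus reuse of the earlier readiness helper are assumptions:

  # Start a cdc server with a DDL-error failpoint armed, then wait for it to come up.
  GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)' \
  cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.out server \
      --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log \
      --log-level debug \
      --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data \
      --cluster-id default &
  wait_cdc_ready   # the /debug/info polling loop sketched earlier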
run task successfully check_changefeed_state http://127.0.0.1:2379 0c3df5e4-a331-4864-8bc1-920340f5489e stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=0c3df5e4-a331-4864-8bc1-920340f5489e + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 0c3df5e4-a331-4864-8bc1-920340f5489e -s + info='{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' + echo '{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' { "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null } ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"stopped",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"stopped",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365064096786937151 + set +x + tso='449527843016409090 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527843016409090 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:50:35 CST 2024] <<<<<< START cdc server in default_value case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.29422944.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 3-th time... + set +x + tso='449527843102916610 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527843102916610 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
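check_changefeed_state appears twice in this stretch: earlier for changefeed-error, expecting a warning carrying the injected failpoint message (note the unquoted message argument is word-split in the trace, so only "failpoint" is actually matched), and just above for the gc_safepoint feed that was stopped, expecting state "stopped" with a null error. Both go through cdc cli changefeed query -s and jq; a compact re-sketch of the same logic:

  # Assert a changefeed's state and error message, as check_changefeed_state does above.
  check_changefeed_state() {
      local pd=$1 id=$2 expected_state=$3 error_pattern=$4
      local info state message
      info=$(cdc cli changefeed query --pd="$pd" -c "$id" -s)
      state=$(echo "$info" | jq -r .state)
      message=$(echo "$info" | jq -r .error.message)
      if [[ "$state" != "$expected_state" ]]; then
          echo "state is $state, expected $expected_state"
          return 1
      fi
      if [[ ! "$message" =~ $error_pattern ]]; then
          echo "error message '$message' does not match '$error_pattern'"
          return 1
      fi
      echo "run task successfully"
  }
  # e.g. check_changefeed_state http://127.0.0.1:2379 changefeed-error warning "failpoint injected retriable error"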
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 16:50:35 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.29352937.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check diff failed 2-th time, retry later cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:50:35 CST 2024] <<<<<< run test case multi_capture success! >>>>>> table ddl_manager.finish_mark not exists for 22-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:35 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/74127980-a755-4a04-ba6e-ee770b567613 {"id":"74127980-a755-4a04-ba6e-ee770b567613","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812632} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb86592b 74127980-a755-4a04-ba6e-ee770b567613 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/74127980-a755-4a04-ba6e-ee770b567613 {"id":"74127980-a755-4a04-ba6e-ee770b567613","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812632} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb86592b 74127980-a755-4a04-ba6e-ee770b567613 /tidb/cdc/default/default/upstream/7365063988073976772 
{"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/74127980-a755-4a04-ba6e-ee770b567613 {"id":"74127980-a755-4a04-ba6e-ee770b567613","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812632} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 4 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb86592b 74127980-a755-4a04-ba6e-ee770b567613 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6480.out cli changefeed create --start-ts=449527834153058306 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-1 + set +x + tso='449527843265970177 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527843265970177 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:50:36 CST 2024] <<<<<< START cdc server in resolve_lock case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.29282930.out server --log-file /tmp/tidb_cdc_test/resolve_lock/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resolve_lock/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 Create changefeed successfully! 
ID: changefeed-error-1 Info: {"upstream_id":7365063988073976772,"namespace":"default","id":"changefeed-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:36.035944732+08:00","start_ts":449527834153058306,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306,"checkpoint_time":"2024-05-04 16:49:59.766"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 5-th time, retry later check diff successfully table ddl_manager.finish_mark not exists for 23-th check, retry later + set +x check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed + endpoint=127.0.0.1:8300 + changefeed_id=changefeed-error-1 + expected_state=warning + field=last_warning + error_pattern=ErrExecDDLFailed ++ curl 127.0.0.1:8300/api/v2/changefeeds/changefeed-error-1/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 740 0 --:--:-- --:--:-- --:--:-- 741 + info='{"state":"normal","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306}' + echo '{"state":"normal","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306}' {"state":"normal","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306} ++ echo '{"state":"normal","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306}' ++ jq -r .state + state=normal + [[ ! 
normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later run task successfully check diff failed 6-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6106c673-bcfd-46ba-a757-f105de8d40d2 {"id":"6106c673-bcfd-46ba-a757-f105de8d40d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc05eed6 6106c673-bcfd-46ba-a757-f105de8d40d2 /tidb/cdc/default/default/upstream/7365064114144064093 {"id":7365064114144064093,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6106c673-bcfd-46ba-a757-f105de8d40d2 {"id":"6106c673-bcfd-46ba-a757-f105de8d40d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc05eed6 6106c673-bcfd-46ba-a757-f105de8d40d2 /tidb/cdc/default/default/upstream/7365064114144064093 {"id":7365064114144064093,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/6106c673-bcfd-46ba-a757-f105de8d40d2 {"id":"6106c673-bcfd-46ba-a757-f105de8d40d2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc05eed6 6106c673-bcfd-46ba-a757-f105de8d40d2 /tidb/cdc/default/default/upstream/7365064114144064093 {"id":7365064114144064093,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2995.out cli changefeed create --start-ts=449527843016409090 
'--sink-uri=kafka://127.0.0.1:9092/ticdc-default-value-test-18322?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' check_changefeed_state http://127.0.0.1:2379 0c3df5e4-a331-4864-8bc1-920340f5489e normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=0c3df5e4-a331-4864-8bc1-920340f5489e + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 0c3df5e4-a331-4864-8bc1-920340f5489e -s + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:38 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a378e70a-aa6e-42f2-b640-7dacc86f4acb {"id":"a378e70a-aa6e-42f2-b640-7dacc86f4acb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc01c1d4 a378e70a-aa6e-42f2-b640-7dacc86f4acb /tidb/cdc/default/default/upstream/7365064123915518598 {"id":7365064123915518598,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a378e70a-aa6e-42f2-b640-7dacc86f4acb {"id":"a378e70a-aa6e-42f2-b640-7dacc86f4acb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc01c1d4 a378e70a-aa6e-42f2-b640-7dacc86f4acb /tidb/cdc/default/default/upstream/7365064123915518598 {"id":7365064123915518598,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a378e70a-aa6e-42f2-b640-7dacc86f4acb {"id":"a378e70a-aa6e-42f2-b640-7dacc86f4acb","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812635} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc01c1d4 a378e70a-aa6e-42f2-b640-7dacc86f4acb /tidb/cdc/default/default/upstream/7365064123915518598 
{"id":7365064123915518598,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2997.out cli changefeed create --start-ts=449527843102916610 '--sink-uri=kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-25247?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + info='{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "normal", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' + echo '{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "normal", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' { "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "normal", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null } ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"normal",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"normal",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_forward http://127.0.0.1:2379 7365064096786937151 449527843225600013 449527843225600014 Create changefeed successfully! 
ID: 59801279-20cf-45e1-a479-320bf034688b Info: {"upstream_id":7365064114144064093,"namespace":"default","id":"59801279-20cf-45e1-a479-320bf034688b","sink_uri":"kafka://127.0.0.1:9092/ticdc-default-value-test-18322?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:38.790029348+08:00","start_ts":449527843016409090,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527843016409090,"checkpoint_ts":449527843016409090,"checkpoint_time":"2024-05-04 16:50:33.577"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: 7e97aac8-7ce2-49b1-9f7b-f8a4e6feb7b3 Info: {"upstream_id":7365064123915518598,"namespace":"default","id":"7e97aac8-7ce2-49b1-9f7b-f8a4e6feb7b3","sink_uri":"kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-25247?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:38.942420344+08:00","start_ts":449527843102916610,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527843102916610,"checkpoint_ts":449527843102916610,"checkpoint_time":"2024-05-04 16:50:33.907"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:39 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0cf8fcba-2014-4a0a-8068-9004122cd8ab {"id":"0cf8fcba-2014-4a0a-8068-9004122cd8ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812636} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc0a0ad3 0cf8fcba-2014-4a0a-8068-9004122cd8ab /tidb/cdc/default/default/upstream/7365064123084548062 {"id":7365064123084548062,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0cf8fcba-2014-4a0a-8068-9004122cd8ab {"id":"0cf8fcba-2014-4a0a-8068-9004122cd8ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812636} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc0a0ad3 0cf8fcba-2014-4a0a-8068-9004122cd8ab /tidb/cdc/default/default/upstream/7365064123084548062 {"id":7365064123084548062,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/0cf8fcba-2014-4a0a-8068-9004122cd8ab {"id":"0cf8fcba-2014-4a0a-8068-9004122cd8ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812636} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc0a0ad3 0cf8fcba-2014-4a0a-8068-9004122cd8ab /tidb/cdc/default/default/upstream/7365064123084548062 {"id":7365064123084548062,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2987.out cli changefeed create --start-ts=449527843265970177 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-12509?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' table ddl_manager.finish_mark not exists for 24-th check, retry later Create changefeed successfully! 
ID: 0318b3ed-7b30-499b-bc6b-89ac92062aa6 Info: {"upstream_id":7365064123084548062,"namespace":"default","id":"0318b3ed-7b30-499b-bc6b-89ac92062aa6","sink_uri":"kafka://127.0.0.1:9092/ticdc-resolve-lock-test-12509?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:39.663170116+08:00","start_ts":449527843265970177,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527843265970177,"checkpoint_ts":449527843265970177,"checkpoint_time":"2024-05-04 16:50:34.529"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
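The changefeed-create steps traced above (default_value, many_pk_or_uk, resolve_lock) all take the same shape: a start TSO captured earlier in the trace is handed to "cdc cli changefeed create" together with a per-case Kafka topic using the open-protocol sink options. A minimal sketch of that invocation, assuming the topic name and TSO are supplied by the surrounding script (values below are examples copied from the log), is:

  # Sketch of the create step seen in the traces; TOPIC and START_TS are placeholders.
  TOPIC="ticdc-resolve-lock-test-12509"      # per-case topic, example value from the log
  START_TS=449527843265970177                # TSO captured earlier in the trace
  SINK_URI="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
  cdc cli changefeed create --start-ts="${START_TS}" --sink-uri="${SINK_URI}"
  # an explicit changefeed id can be passed with -c <id>, as in the changefeed-error-1 case above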
run task successfully check_changefeed_state http://127.0.0.1:2379 0c3df5e4-a331-4864-8bc1-920340f5489e stopped null + endpoints=http://127.0.0.1:2379 + changefeed_id=0c3df5e4-a331-4864-8bc1-920340f5489e + expected_state=stopped + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 0c3df5e4-a331-4864-8bc1-920340f5489e -s check diff successfully + set +x [Sat May 4 16:50:40 CST 2024] <<<<<< START kafka consumer in default_value case >>>>>> go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading golang.org/x/time v0.5.0 go: downloading golang.org/x/sync v0.7.0 + set +x [Sat May 4 16:50:40 CST 2024] <<<<<< START kafka consumer in many_pk_or_uk case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b + info='{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' + echo '{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null }' { "upstream_id": 7365064096786937151, "namespace": "default", "id": "0c3df5e4-a331-4864-8bc1-920340f5489e", "state": "stopped", "checkpoint_tso": 449527843225600014, "checkpoint_time": "2024-05-04 16:50:34.375", "error": null } ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"stopped",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"0c3df5e4-a331-4864-8bc1-920340f5489e",' '"state":' '"stopped",' '"checkpoint_tso":' 449527843225600014, '"checkpoint_time":' '"2024-05-04' '16:50:34.375",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully check_changefeed_state http://127.0.0.1:2379 8e22361a-9067-4e33-96cc-1f0fd5345ff4 normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=8e22361a-9067-4e33-96cc-1f0fd5345ff4 + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 8e22361a-9067-4e33-96cc-1f0fd5345ff4 -s go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed + endpoint=127.0.0.1:8300 + changefeed_id=changefeed-error-1 + expected_state=warning + field=last_warning + error_pattern=ErrExecDDLFailed ++ curl 127.0.0.1:8300/api/v2/changefeeds/changefeed-error-1/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 253 100 253 0 0 2292 0 --:--:-- --:--:-- --:--:-- 2300 + info='{"state":"warning","resolved_ts":449527834546274312,"checkpoint_ts":449527834546274312,"last_warning":{"time":"2024-05-04T16:50:38.269891706+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' + echo '{"state":"warning","resolved_ts":449527834546274312,"checkpoint_ts":449527834546274312,"last_warning":{"time":"2024-05-04T16:50:38.269891706+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}' {"state":"warning","resolved_ts":449527834546274312,"checkpoint_ts":449527834546274312,"last_warning":{"time":"2024-05-04T16:50:38.269891706+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}} ++ echo '{"state":"warning","resolved_ts":449527834546274312,"checkpoint_ts":449527834546274312,"last_warning":{"time":"2024-05-04T16:50:38.269891706+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449527834546274312,"checkpoint_ts":449527834546274312,"last_warning":{"time":"2024-05-04T16:50:38.269891706+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}' ++ jq -r .last_warning.message + error_msg='[CDC:ErrExecDDLFailed]exec DDL failed' + [[ ! 
[CDC:ErrExecDDLFailed]exec DDL failed =~ ErrExecDDLFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6536.out cli changefeed remove -c changefeed-error-1 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 + info='{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "8e22361a-9067-4e33-96cc-1f0fd5345ff4", "state": "normal", "checkpoint_tso": 449527844785094658, "checkpoint_time": "2024-05-04 16:50:40.324", "error": null }' + echo '{ "upstream_id": 7365064096786937151, "namespace": "default", "id": "8e22361a-9067-4e33-96cc-1f0fd5345ff4", "state": "normal", "checkpoint_tso": 449527844785094658, "checkpoint_time": "2024-05-04 16:50:40.324", "error": null }' { "upstream_id": 7365064096786937151, "namespace": "default", "id": "8e22361a-9067-4e33-96cc-1f0fd5345ff4", "state": "normal", "checkpoint_tso": 449527844785094658, "checkpoint_time": "2024-05-04 16:50:40.324", "error": null } ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"8e22361a-9067-4e33-96cc-1f0fd5345ff4",' '"state":' '"normal",' '"checkpoint_tso":' 449527844785094658, '"checkpoint_time":' '"2024-05-04' '16:50:40.324",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365064096786937151, '"namespace":' '"default",' '"id":' '"8e22361a-9067-4e33-96cc-1f0fd5345ff4",' '"state":' '"normal",' '"checkpoint_tso":' 449527844785094658, '"checkpoint_time":' '"2024-05-04' '16:50:40.324",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully check_safepoint_equal http://127.0.0.1:2379 7365064096786937151 wait process cdc.test exit for 1-th time... go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/sys v0.19.0 go: downloading golang.org/x/net v0.24.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 go: downloading golang.org/x/text v0.14.0 Changefeed remove successfully. ID: changefeed-error-1 CheckpointTs: 449527834546274312 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... 
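The check_changefeed_status trace above polls the v2 HTTP API on the capture and compares both the state and the last_warning message. Reconstructed from the trace alone (the real helper in the test library also has retry handling, omitted here), the check is roughly:

  # Hedged reconstruction of check_changefeed_status based on the trace above.
  check_changefeed_status() {
    local endpoint=$1 changefeed_id=$2 expected_state=$3 field=$4 error_pattern=$5
    local info state msg
    info=$(curl -s "${endpoint}/api/v2/changefeeds/${changefeed_id}/status")
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
      echo "changefeed state $state does not equal to $expected_state"
      return 1
    fi
    if [[ -n "${field:-}" ]]; then
      msg=$(echo "$info" | jq -r ".${field}.message")
      [[ "$msg" =~ $error_pattern ]] || return 1
    fi
  }
  # e.g. check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed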
+ set +x [Sat May 4 16:50:41 CST 2024] <<<<<< START kafka consumer in resolve_lock case >>>>>> go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b wait process cdc.test exit for 2-th time... table ddl_manager.finish_mark not exists for 25-th check, retry later go: downloading go.uber.org/multierr v1.11.0 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/golang/protobuf v1.5.4 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:50:41 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:50:39 CST 2024] <<<<<< run test case kafka_messages success! 
>>>>>> go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sys v0.19.0 go: downloading golang.org/x/net v0.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading golang.org/x/text v0.14.0 + set +x go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/google/btree v1.1.2 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/tidwall/btree v1.7.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/yangkeao/ldap/v3 
v3.4.5-0.20230421065457-369a3bab1117 go: downloading golang.org/x/tools v0.20.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/kr/pretty v0.3.1 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading 
go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading cloud.google.com/go v0.112.2 go: downloading google.golang.org/api v0.170.0 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading go.uber.org/mock v0.4.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading 
github.com/prometheus/procfs v0.13.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/robfig/cron v1.2.0 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/kylelemons/godebug v1.1.0 wait process cdc.test exit for 1-th time... 
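Interleaved with the module downloads, the gc-safepoint changefeeds (0c3df5e4-... and 8e22361a-... above) keep re-running check_changefeed_state. Based only on what the trace shows, that helper boils down to one "cdc cli changefeed query" plus two jq lookups; a sketch, with the tls_dir branch and retry wrapper from the real script left out, is:

  # Sketch of check_changefeed_state as implied by the traces; TLS and retries omitted.
  check_changefeed_state() {
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="${endpoints}" -c "${changefeed_id}" -s)
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
      echo "changefeed state $state does not equal to $expected_state"
      return 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
      echo "error message $message does not match $error_msg"
      return 1
    fi
  }
  # e.g. check_changefeed_state http://127.0.0.1:2379 0c3df5e4-a331-4864-8bc1-920340f5489e stopped null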
go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 
go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process cdc.test exit for 2-th time... go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 table ddl_manager.finish_mark not exists for 26-th check, retry later go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/dolthub/swiss v0.2.1 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading golang.org/x/time v0.5.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/vbauerster/mpb/v7 v7.5.3 go: downloading 
golang.org/x/term v0.19.0 go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/Masterminds/semver v1.5.0 go: downloading github.com/robfig/cron v1.2.0 go: downloading k8s.io/api v0.28.6 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/emirpasic/gods v1.18.1 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading go.opentelemetry.io/otel 
v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da run task successfully go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 Changefeed remove successfully. ID: 0c3df5e4-a331-4864-8bc1-920340f5489e CheckpointTs: 449527843225600014 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-7336?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_forward http://127.0.0.1:2379 7365064096786937151 449527844785094657 449527843225600014 449527844785094658 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner' + key_prefix=/tidb/cdc/default/__cdc_meta__/owner + message=owner ++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only + info= + [[ '' =~ owner ]] + echo 'check pass' check pass + exit 0 run task successfully [Sat May 4 16:50:44 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectActualGCSafePoint=return(9223372036854775807)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.66096611.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading k8s.io/apimachinery v0.28.6 go: downloading gopkg.in/inf.v0 v0.9.1 go: downloading k8s.io/klog/v2 v2.120.1 go: downloading github.com/google/gofuzz v1.2.0 go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1 go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd run task successfully table ddl_manager.finish_mark not exists for 27-th check, retry later Changefeed remove successfully. ID: 8e22361a-9067-4e33-96cc-1f0fd5345ff4 CheckpointTs: 449527845860147202 SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-7336?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 check_safepoint_cleared http://127.0.0.1:2379 7365064096786937151 run task successfully wait process cdc.test exit for 1-th time... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/region_merge/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
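The check_etcd_meta_not_exist step in the trace above verifies that the owner key is gone from etcd once the capture exits: it lists the keys under a prefix and asserts the marker string is absent. A condensed sketch of that helper, with the prefix and marker taken from this run (the function body is reconstructed from the trace, not copied from the repository):

    check_etcd_meta_not_exist() {
        local key_prefix=$1
        local message=$2
        local info
        # list only the keys under the prefix; the check passes when the marker is absent
        info=$(etcdctl get "$key_prefix" --prefix --keys-only)
        if [[ $info =~ $message ]]; then
            echo "check failed: '$message' still present under $key_prefix"
            return 1
        fi
        echo 'check pass'
    }

    check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner'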
go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... wait process cdc.test exit for 2-th time... go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc) 3727120896 bytes in 11.61 secs (320981244 bytes/sec) [Pipeline] { [Pipeline] sh + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:47 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32970199-7c92-4efd-be52-718e95dc02c9 {"id":"32970199-7c92-4efd-be52-718e95dc02c9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812644} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8659b6 32970199-7c92-4efd-be52-718e95dc02c9 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32970199-7c92-4efd-be52-718e95dc02c9 {"id":"32970199-7c92-4efd-be52-718e95dc02c9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812644} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 
/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8659b6 32970199-7c92-4efd-be52-718e95dc02c9 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/32970199-7c92-4efd-be52-718e95dc02c9 {"id":"32970199-7c92-4efd-be52-718e95dc02c9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812644} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 5 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb8659b6 32970199-7c92-4efd-be52-718e95dc02c9 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6666.out cli changefeed create --start-ts=449527834153058306 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-2 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd [Pipeline] container [Pipeline] { table ddl_manager.finish_mark not exists for 28-th check, retry later [Pipeline] timeout Timeout set to expire in 6 min 0 sec [Pipeline] { Create changefeed successfully! 
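The curl probe traced above is the readiness loop the harness runs after launching cdc.test: it polls /debug/info with basic auth until the body contains 'etcd info', giving up after 50 attempts with a 3-second sleep between them. A condensed sketch of that loop, using the endpoint and credentials shown in the trace:

    for i in $(seq 0 50); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret || true)
        # an explicit error body fails the wait; the 'etcd info' marker means the capture has registered itself
        echo "$res" | grep -q 'failed to get info:' && exit 1
        echo "$res" | grep -q 'etcd info' && break
        [ "$i" -eq 50 ] && exit 1
        sleep 3
    done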
ID: changefeed-error-2 Info: {"upstream_id":7365063988073976772,"namespace":"default","id":"changefeed-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:47.542072569+08:00","start_ts":449527834153058306,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527834153058306,"checkpoint_ts":449527834153058306,"checkpoint_time":"2024-05-04 16:49:59.766"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Pipeline] sh =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo + CDC_BINARY=cdc.test + SINK_TYPE=kafka + CDC_COUNT=3 + DB_COUNT=4 + trap stop_tidb_cluster EXIT + run_normal_case_and_unavailable_pd conf/changefeed-redo.toml + rm -rf /tmp/tidb_cdc_test/synced_status_with_redo + mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo + start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo The 1 times to try to start tidb cluster... + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... + echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd wait process cdc.test exit for 3-th time... + echo Waiting for zookeeper to be ready... Waiting for zookeeper to be ready... + nc -z localhost 2181 + echo Waiting for kafka to be ready... Waiting for kafka to be ready... + nc -z localhost 9092 + echo Waiting for kafka-broker to be ready... Waiting for kafka-broker to be ready... 
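The 'Waiting for ...' lines above come from the broker readiness check that runs before each Kafka test case: probe the ZooKeeper and Kafka ports with nc, then dump ZooKeeper's state and look for the registered broker id. The same pipeline written out, assuming the single broker with id 1 configured for this pod:

    nc -z localhost 2181    # ZooKeeper port open
    nc -z localhost 9092    # Kafka listener open
    # the broker is usable once its id shows up under /brokers/ids in the ZooKeeper dump
    echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1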
+ echo dump + nc localhost 2181 + grep brokers + awk {$1=$1;print} + grep -F -w /brokers/ids/1 /brokers/ids/1 Verifying downstream PD is started... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:50:48 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>> + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-error-2 failed [CDC:ErrSnapshotLostByGC] + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-error-2 + expected_state=failed + error_msg='[CDC:ErrSnapshotLostByGC]' + tls_dir='[CDC:ErrSnapshotLostByGC]' + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error-2 -s start tidb cluster in /tmp/tidb_cdc_test/region_merge Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:47.627189119+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at 9223372036854775807" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:47.627189119+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at 9223372036854775807" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-error-2", "state": "failed", "checkpoint_tso": 449527834153058306, "checkpoint_time": "2024-05-04 16:49:59.766", "error": { "time": "2024-05-04T16:50:47.627189119+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrSnapshotLostByGC", "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at 9223372036854775807" } } ++ jq -r .state ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449527834153058306, '"checkpoint_time":' '"2024-05-04' '16:49:59.766",' '"error":' '{' '"time":' '"2024-05-04T16:50:47.627189119+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' + state=failed + [[ ! 
failed == \f\a\i\l\e\d ]] ++ jq -r .error.message ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449527834153058306, '"checkpoint_time":' '"2024-05-04' '16:49:59.766",' '"error":' '{' '"time":' '"2024-05-04T16:50:47.627189119+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}' + message='[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at 9223372036854775807' + [[ ! [CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449527834153058306 is earlier than or equal to GC safepoint at 9223372036854775807 =~ \[CDC:ErrSnapshotLostByGC] ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6753.out cli changefeed remove -c changefeed-error-2 Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 29-th check, retry later [Pipeline] // timeout [Pipeline] } Changefeed remove successfully. ID: changefeed-error-2 CheckpointTs: 449527834153058306 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... Verifying downstream PD is started... table ddl_manager.finish_mark not exists for 30-th check, retry later + set +x wait process cdc.test exit for 1-th time... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release wait process cdc.test exit for 2-th time... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
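check_changefeed_state, traced above, drives these assertions: it queries the changefeed through cdc cli, extracts the state and error message with jq, and compares them with the expected values. A condensed sketch (TLS handling is omitted, and the error-message comparison here is a plain substring check rather than the regex match the trace shows):

    check_changefeed_state() {
        local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
        local info state message
        info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
        state=$(echo "$info" | jq -r .state)
        if [[ $state != "$expected_state" ]]; then
            echo "changefeed state $state does not equal to $expected_state"
            return 1
        fi
        message=$(echo "$info" | jq -r .error.message)
        if [[ $message != *"$error_msg"* ]]; then
            echo "error message $message does not match $error_msg"
            return 1
        fi
    }

    check_changefeed_state http://127.0.0.1:2379 changefeed-error-2 failed '[CDC:ErrSnapshotLostByGC]'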
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:50:52 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.67956797.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table ddl_manager.finish_mark not exists for 31-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
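Both cdc.test launches in the changefeed_error case rely on Go failpoints: the harness sets GO_FAILPOINTS before starting the coverage-instrumented binary so the named failpoint fires inside the server (first the artificially high GC safepoint, then the injected redo-manager error seen later). A sketch of the second launch as it appears in the trace; running it in the background so the readiness probe can poll it reflects how the harness behaves rather than a single line of the log:

    GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)' \
        cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.67956797.out \
        server \
        --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log \
        --log-level debug \
        --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data \
        --cluster-id default &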
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 32-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:50:56 GMT < Content-Length: 883 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f07b4c17-d577-4600-89b4-20f478dc4575 {"id":"f07b4c17-d577-4600-89b4-20f478dc4575","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb865a16 f07b4c17-d577-4600-89b4-20f478dc4575 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f07b4c17-d577-4600-89b4-20f478dc4575 {"id":"f07b4c17-d577-4600-89b4-20f478dc4575","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb865a16 f07b4c17-d577-4600-89b4-20f478dc4575 /tidb/cdc/default/default/upstream/7365063988073976772 {"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/f07b4c17-d577-4600-89b4-20f478dc4575 {"id":"f07b4c17-d577-4600-89b4-20f478dc4575","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812653} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count 7 /tidb/cdc/default/__cdc_meta__/owner/22318f42cb865a16 f07b4c17-d577-4600-89b4-20f478dc4575 /tidb/cdc/default/default/upstream/7365063988073976772 
{"id":7365063988073976772,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6845.out cli changefeed create --start-ts=0 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-initialize-error [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! ID: changefeed-initialize-error Info: {"upstream_id":7365063988073976772,"namespace":"default","id":"changefeed-initialize-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:50:56.552101775+08:00","start_ts":449527849003253763,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527849003253763,"checkpoint_ts":449527849003253763,"checkpoint_time":"2024-05-04 16:50:56.415"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Pipeline] } start tidb cluster in /tmp/tidb_cdc_test/partition_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
[Pipeline] } [Pipeline] // stage [Pipeline] } table ddl_manager.finish_mark not exists for 33-th check, retry later [Pipeline] // timeout [Pipeline] } [Pipeline] // timeout [Pipeline] // timeout [Pipeline] } [Pipeline] } + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449527849003253763, '"checkpoint_time":' '"2024-05-04' '16:50:56.415",' '"error":' '{' '"time":' '"2024-05-04T16:50:56.733529374+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // container [Pipeline] } [Pipeline] // container [Pipeline] sh + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] // container [Pipeline] // container [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G15 Run cases: new_ci_collation batch_add_table multi_rocks PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=62356c54-54e4-45a9-9bcf-ee7ed05226ce BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT=tcp://10.233.0.1:443 KUBERNETES_PORT_443_TCP_PORT=443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang 
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G15 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-9z3t4 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq pingcap_tiflow_pull_cdc_integration_kafka_test_1836-9z3t4 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
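Each Jenkins pod invokes ./tests/integration_tests/run_group.sh kafka <GROUP>, and the 'Run cases: ...' line lists the cases that group owns (G15: new_ci_collation batch_add_table multi_rocks; G16: owner_resign processor_etcd_worker_delay sink_hang; G17: clustered_index processor_resolved_ts_fallback). A simplified, hypothetical sketch of that dispatch; the real group table and per-sink skip logic live in tests/integration_tests/run_group.sh, and passing the sink type as the first argument of each run.sh is an assumption based on the SINK_TYPE=kafka assignment traced earlier:

    sink_type=$1    # e.g. kafka
    group=$2        # e.g. G15
    case $group in
        G15) cases='new_ci_collation batch_add_table multi_rocks' ;;
        G16) cases='owner_resign processor_etcd_worker_delay sink_hang' ;;
        G17) cases='clustered_index processor_resolved_ts_fallback' ;;
    esac
    for name in $cases; do
        echo "=================>> Running test tests/integration_tests/$name/run.sh using Sink-Type: $sink_type... <<================="
        bash "tests/integration_tests/$name/run.sh" "$sink_type"
    done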
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] sh + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G17 Run cases: clustered_index processor_resolved_ts_fallback PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=05460935-6370-4e29-b259-69d472a59db6 BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G17 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-td6bn GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-td6bn pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-td6bn-w0ksf GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:50:59 CST 2024] <<<<<< skip test case clustered_index for kafka! >>>>>> find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:50:59 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! 
>>>>>> [Pipeline] // withEnv [Pipeline] } table ddl_manager.finish_mark not exists for 34-th check, retry later + rm -rf /tmp/tidb_cdc_test + mkdir -p /tmp/tidb_cdc_test + chmod +x ./tests/integration_tests/run_group.sh + ./tests/integration_tests/run_group.sh kafka G16 Run cases: owner_resign processor_etcd_worker_delay sink_hang PROW_JOB_ID=8011d7ec-925e-42c5-a960-c8067df92ded JENKINS_NODE_COOKIE=211cfbbb-d12f-43db-b78d-35ddabcb585c BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/ GOLANG_VERSION=1.21.0 HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw HUDSON_SERVER_COOKIE=83ef27fe9acccc92 KUBERNETES_PORT_443_TCP_PORT=443 KUBERNETES_PORT=tcp://10.233.0.1:443 TERM=xterm STAGE_NAME=Test BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836 KUBERNETES_SERVICE_PORT=443 GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009 JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786675747499282435","prowjobid":"8011d7ec-925e-42c5-a960-c8067df92ded","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/c950cce3a9b105fd95bb2c788e1ab69ec32e0668","author_link":"https://github.com/lidezhu"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=03312178c534dce949face80c69812d989e55009 PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow HUDSON_URL=https://do.pingcap.net/jenkins/ TICDC_COVERALLS_TOKEN=**** JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#1836 TEST_GROUP=G16 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1786675747499282435 TICDC_CODECOV_TOKEN=**** 
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=pull_cdc_integration_kafka_test GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009 RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1836/display/redirect?page=tests SHLVL=5 HOME=/home/jenkins POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k7l3s GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp TINI_VERSION=v0.19.0 CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1836-k7l3s pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k7l3s-qm8cw GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect BUILD_NUMBER=1836 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz _=/usr/bin/env find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:50:59 CST 2024] <<<<<< run test case owner_resign success! >>>>>> [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
[Sat May 4 16:50:58 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 10 [Sat May 4 16:50:58 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: 20 [Sat May 4 16:50:58 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>> schema registry uri found: finish table test.finish not exists for 1-th check, retry later + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "warning", "checkpoint_tso": 449527849003253763, "checkpoint_time": "2024-05-04 16:50:56.415", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449527849003253763, '"checkpoint_time":' '"2024-05-04' '16:50:56.415",' '"error":' '{' '"time":' '"2024-05-04T16:50:56.733529374+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 2-th time, retry later Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32992c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:9437, start at 2024-05-04 16:50:59.307907866 +0800 CST m=+5.156444647 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.314 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.275 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.275 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32992c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:9437, start at 2024-05-04 16:50:59.307907866 +0800 CST m=+5.156444647 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.314 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.275 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.275 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b329aa80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:9523, start at 2024-05-04 16:50:59.39511766 +0800 CST m=+5.189464474 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.401 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.370 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.370 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
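Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the GC bookkeeping rows that TiDB keeps in the mysql.tidb table (tikv_gc_run_interval, tikv_gc_life_time, tikv_gc_safe_point, and so on); the harness prints them while verifying that each upstream and downstream TiDB instance has bootstrapped. They can be inspected directly with any MySQL client; the host and port below are assumptions (the usual TiDB default), not values taken from this log:

# Sketch: list the GC-related rows on the instance under test.
mysql -h 127.0.0.1 -P 4000 -u root -e \
  "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"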
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32a07c0008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:4190, start at 2024-05-04 16:50:59.748684266 +0800 CST m=+5.433656919 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.755 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.743 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.743 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 35-th check, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Sat May 4 16:51:02 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1085410856.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32a07c0008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:4190, start at 2024-05-04 16:50:59.748684266 +0800 CST m=+5.433656919 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.755 +0800 Current GC worker leader lease. 
(DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.743 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.743 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32a0680011 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:4267, start at 2024-05-04 16:50:59.755411869 +0800 CST m=+5.389854927 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:52:59.762 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:50:59.738 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:40:59.738 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
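Note: the "START cdc server in kafka_sink_error_resume case" block above injects a failpoint through GO_FAILPOINTS (KafkaSinkAsyncSendError) before launching cdc.test, then polls http://127.0.0.1:8300/debug/info until the response contains "etcd info", giving up after 50 attempts. A condensed sketch of that readiness loop; the endpoint, credentials, and retry bounds come from the trace, the rest is illustrative and assumes cdc.test is already running with the desired GO_FAILPOINTS value:

for i in $(seq 0 50); do
    # Same probe as in the trace: basic auth against the cdc debug endpoint.
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is ready"
        break
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server failed to start"
        exit 1
    fi
    sleep 3
done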
Logging trace to /tmp/tidb_cdc_test/region_merge/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/region_merge/tiflash/log/error.log arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:02 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! >>>>>> [Pipeline] // withCredentials [Pipeline] } table test.finish exists check diff successfully [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... wait process cdc.test exit for 1-th time... [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } table ddl_manager.finish_mark not exists for 36-th check, retry later [Pipeline] // stage ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] } wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 3-th time... [Sat May 4 16:51:04 CST 2024] <<<<<< START cdc server in region_merge case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.region_merge.56425644.out server --log-file /tmp/tidb_cdc_test/region_merge/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/region_merge/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null } ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7041.out cli changefeed pause -c changefeed-initialize-error cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:51:04 CST 2024] <<<<<< run test case multi_topics_v2 success! >>>>>> PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... table ddl_manager.finish_mark not exists for 37-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:05 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2f15816e-efeb-41d7-8fba-6359112be70b {"id":"2f15816e-efeb-41d7-8fba-6359112be70b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812662} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7571cf 2f15816e-efeb-41d7-8fba-6359112be70b /tidb/cdc/default/default/upstream/7365064239449571126 {"id":7365064239449571126,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2f15816e-efeb-41d7-8fba-6359112be70b {"id":"2f15816e-efeb-41d7-8fba-6359112be70b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812662} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7571cf 2f15816e-efeb-41d7-8fba-6359112be70b /tidb/cdc/default/default/upstream/7365064239449571126 {"id":7365064239449571126,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2f15816e-efeb-41d7-8fba-6359112be70b {"id":"2f15816e-efeb-41d7-8fba-6359112be70b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812662} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7571cf 2f15816e-efeb-41d7-8fba-6359112be70b /tidb/cdc/default/default/upstream/7365064239449571126 {"id":7365064239449571126,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:51:05 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>> check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 811 0 --:--:-- 
--:--:-- --:--:-- 819 + info='{"state":"normal","resolved_ts":449527851417862149,"checkpoint_ts":449527851417862149}' + echo '{"state":"normal","resolved_ts":449527851417862149,"checkpoint_ts":449527851417862149}' {"state":"normal","resolved_ts":449527851417862149,"checkpoint_ts":449527851417862149} ++ echo '{"state":"normal","resolved_ts":449527851417862149,"checkpoint_ts":449527851417862149}' ++ jq -r .state find: '/tmp/tidb_cdc_test/*/*': No such file or directory =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_hang/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:06 CST 2024] <<<<<< run test case sink_hang success! >>>>>> + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error stopped changefeed new redo manager injected error + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=stopped + error_msg=changefeed + tls_dir=error + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "stopped", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": { "time": "2024-05-04T16:50:56.733529374+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrProcessorUnknown", "message": "changefeed new redo manager injected error" } } ++ echo '{' '"upstream_id":' 
7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' '{' '"time":' '"2024-05-04T16:50:56.733529374+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .state + state=stopped + [[ ! stopped == \s\t\o\p\p\e\d ]] ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' '{' '"time":' '"2024-05-04T16:50:56.733529374+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}' ++ jq -r .error.message + message='changefeed new redo manager injected error' + [[ ! changefeed new redo manager injected error =~ changefeed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7123.out cli changefeed resume -c changefeed-initialize-error ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_manager.finish_mark not exists for 38-th check, retry later PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:07 GMT < Content-Length: 859 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e147cbd5-4164-4c07-9671-d973509c7572 {"id":"e147cbd5-4164-4c07-9671-d973509c7572","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7ee1a3 e147cbd5-4164-4c07-9671-d973509c7572 /tidb/cdc/default/default/upstream/7365064254232088616 {"id":7365064254232088616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e147cbd5-4164-4c07-9671-d973509c7572 {"id":"e147cbd5-4164-4c07-9671-d973509c7572","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7ee1a3 e147cbd5-4164-4c07-9671-d973509c7572 /tidb/cdc/default/default/upstream/7365064254232088616 {"id":7365064254232088616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e147cbd5-4164-4c07-9671-d973509c7572 {"id":"e147cbd5-4164-4c07-9671-d973509c7572","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812664} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc7ee1a3 e147cbd5-4164-4c07-9671-d973509c7572 /tidb/cdc/default/default/upstream/7365064254232088616 {"id":7365064254232088616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Create changefeed successfully! ID: c4b12d96-948c-4837-be93-28eed329a9df Info: {"upstream_id":7365064254232088616,"namespace":"default","id":"c4b12d96-948c-4837-be93-28eed329a9df","sink_uri":"kafka://127.0.0.1:9092/ticdc-region-merge-test-29042?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:51:07.953455541+08:00","start_ts":449527851985928196,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527851985928196,"checkpoint_ts":449527851985928196,"checkpoint_time":"2024-05-04 16:51:07.793"} [Sat May 4 16:51:07 CST 2024] <<<<<< START kafka consumer in region_merge case >>>>>> check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 775 0 --:--:-- --:--:-- --:--:-- 781 + info='{"state":"normal","resolved_ts":449527851483136033,"checkpoint_ts":449527851483136033}' + echo '{"state":"normal","resolved_ts":449527851483136033,"checkpoint_ts":449527851483136033}' {"state":"normal","resolved_ts":449527851483136033,"checkpoint_ts":449527851483136033} ++ echo '{"state":"normal","resolved_ts":449527851483136033,"checkpoint_ts":449527851483136033}' ++ jq -r .state 
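Note: the "Create changefeed successfully!" block above (region_merge case) shows the full changefeed info returned on creation: the Kafka sink URI carries protocol, partition-num, kafka-version, and max-message-bytes as query parameters (the \u0026 sequences are JSON-escaped ampersands), followed by the effective replication config. A hedged sketch of creating an equivalent changefeed by hand; the create subcommand and its flags are standard cdc cli usage stated here as an assumption (this log only shows query/pause/resume/remove), and the topic, changefeed id, and start-ts are placeholders:

SINK_URI="kafka://127.0.0.1:9092/ticdc-example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --sink-uri="$SINK_URI" \
    --changefeed-id="example-changefeed" \
    --start-ts=449527852821643265   # a TSO obtained via 'cdc cli tso query'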
split_and_random_merge scale: 20 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3313700016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:10020, start at 2024-05-04 16:51:07.119296836 +0800 CST m=+5.120043880 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:07.127 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:07.100 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:07.100 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3313700016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:10020, start at 2024-05-04 16:51:07.119296836 +0800 CST m=+5.120043880 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:07.127 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:07.100 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:07.100 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3314dc0009 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-fzbr4-3vph9, pid:10105, start at 2024-05-04 16:51:07.202088391 +0800 CST m=+5.151954466 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:07.208 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:07.191 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:07.191 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + state=normal + [[ ! normal == \w\a\r\n\i\n\g ]] + echo 'changefeed state normal does not equal to warning' changefeed state normal does not equal to warning + exit 1 run task failed 2-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32fbb00008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:10859, start at 2024-05-04 16:51:05.589220085 +0800 CST m=+5.250961454 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:05.596 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:05.580 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:05.580 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32fbb00008 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:10859, start at 2024-05-04 16:51:05.589220085 +0800 CST m=+5.250961454 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:05.596 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:05.580 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:05.580 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b32fd340014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-ppnsc-4cvmp, pid:10932, start at 2024-05-04 16:51:05.711496567 +0800 CST m=+5.322318292 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:05.719 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:05.677 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:05.677 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } Verifying downstream PD is started... + set +x check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=changefeed-initialize-error + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s + info='{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null }' + echo '{ "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null }' { "upstream_id": 7365063988073976772, "namespace": "default", "id": "changefeed-initialize-error", "state": "normal", "checkpoint_tso": 449527851008917506, "checkpoint_time": "2024-05-04 16:51:04.066", "error": null } ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! 
normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365063988073976772, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449527851008917506, '"checkpoint_time":' '"2024-05-04' '16:51:04.066",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7207.out cli changefeed remove -c changefeed-initialize-error \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } Changefeed remove successfully. ID: changefeed-initialize-error CheckpointTs: 449527851008917506 SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-30679?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 PASS coverage: 2.0% of statements in github.com/pingcap/tiflow/... [Pipeline] // withEnv Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [Pipeline] } [Pipeline] // node [Pipeline] } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11545.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // stage [Pipeline] } + cd /tmp/tidb_cdc_test/synced_status_with_redo ++ run_cdc_cli_tso_query 127.0.0.1 2379 + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12283.out cli tso query --pd=http://127.0.0.1:2379 table ddl_manager.finish_mark not exists for 39-th check, retry later + set +x wait process cdc.test exit for 1-th time... + set +x + tso='449527852722028545 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527852722028545 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 16:51:12 CST 2024] <<<<<< START cdc server in partition_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1158411586.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449527852821643265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527852821643265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + start_ts=449527852821643265 + run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test [Sat May 4 16:51:12 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + GO_FAILPOINTS= + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1232112323.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 2-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 warning last_warning kafka sink injected error + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=warning + field=last_warning + error_pattern=kafka ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2199 0 --:--:-- --:--:-- --:--:-- 2218 + info='{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \w\a\r\n\i\n\g ]] + [[ -z last_warning ]] ++ echo '{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .last_warning.message + error_msg='kafka sink injected error' + [[ ! 
kafka sink injected error =~ kafka ]] run task successfully table ddl_manager.finish_mark not exists for 40-th check, retry later check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 normal + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 1851 0 --:--:-- --:--:-- --:--:-- 1862 + info='{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449527853134905353,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 1-th time, retry later wait process cdc.test exit for 3-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b336f680016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:1417, start at 2024-05-04 16:51:13.006549772 +0800 CST m=+5.137396495 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:13.013 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:12.986 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:12.986 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. 
tikv_gc_leader_uuid 63d0b336f680016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:1417, start at 2024-05-04 16:51:13.006549772 +0800 CST m=+5.137396495 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:13.013 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:12.986 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:12.986 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3370e40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:1503, start at 2024-05-04 16:51:13.112927993 +0800 CST m=+5.192264999 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:13.121 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:13.081 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:13.081 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
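The "Verifying Upstream TiDB is started..." and "Verifying Downstream TiDB is started..." steps above keep hitting ERROR 2003 until the server accepts connections, then dump the GC bookkeeping rows. A hedged bash sketch of that probe, assuming the dumped rows come from the mysql.tidb table and that the function name and retry budget below are illustrative rather than the harness's exact code:

# Hedged sketch: retry a simple query until TiDB answers, then print the
# GC bookkeeping rows.  The query target (mysql.tidb) is consistent with
# the VARIABLE_NAME/VARIABLE_VALUE/COMMENT dump above; names and the
# 60-try budget are assumptions.
wait_for_tidb() {
    local host=${1:-127.0.0.1} port=${2:-4000}
    local i
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root \
               -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' \
               2>/dev/null; then
            return 0
        fi
        sleep 1    # ERROR 2003 (HY000) just means the port is not listening yet
    done
    return 1
}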
Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:51:14 CST 2024] <<<<<< run test case changefeed_error success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 41-th check, retry later *************************** 1. row *************************** count(distinct region_id): 1 check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 normal + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 3878 0 --:--:-- --:--:-- --:--:-- 3935 + info='{"state":"warning","resolved_ts":449527853659193354,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449527853659193354,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449527853659193354,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449527853659193354,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 2-th time, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/daff9f64-6140-49fc-8a30-1d1dfb7cada1 {"id":"daff9f64-6140-49fc-8a30-1d1dfb7cada1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9b0ccd daff9f64-6140-49fc-8a30-1d1dfb7cada1 /tidb/cdc/default/default/upstream/7365064288112448513 {"id":7365064288112448513,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/daff9f64-6140-49fc-8a30-1d1dfb7cada1 {"id":"daff9f64-6140-49fc-8a30-1d1dfb7cada1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9b0ccd daff9f64-6140-49fc-8a30-1d1dfb7cada1 /tidb/cdc/default/default/upstream/7365064288112448513 {"id":7365064288112448513,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/daff9f64-6140-49fc-8a30-1d1dfb7cada1 {"id":"daff9f64-6140-49fc-8a30-1d1dfb7cada1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9b0ccd daff9f64-6140-49fc-8a30-1d1dfb7cada1 /tidb/cdc/default/default/upstream/7365064288112448513 {"id":7365064288112448513,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11637.out cli changefeed create --start-ts=449527852722028545 
'--sink-uri=kafka://127.0.0.1:9092/ticdc-partition-table-test-5187?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:15 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 {"id":"93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9292d3 93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 /tidb/cdc/default/default/upstream/7365064280634228141 {"id":7365064280634228141,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 {"id":"93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9292d3 93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 /tidb/cdc/default/default/upstream/7365064280634228141 {"id":7365064280634228141,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 {"id":"93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812672} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cc9292d3 93b367d1-6c7d-4df1-bc50-15cc8bf1d0a5 /tidb/cdc/default/default/upstream/7365064280634228141 {"id":7365064280634228141,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + config_path=conf/changefeed-redo.toml + SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1' + run_cdc_cli changefeed create --start-ts=449527852821643265 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 
--config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12382.out cli changefeed create --start-ts=449527852821643265 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml Create changefeed successfully! ID: bab96b7f-eba3-4dd3-a0f1-028b94695f7c Info: {"upstream_id":7365064288112448513,"namespace":"default","id":"bab96b7f-eba3-4dd3-a0f1-028b94695f7c","sink_uri":"kafka://127.0.0.1:9092/ticdc-partition-table-test-5187?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:51:15.649091474+08:00","start_ts":449527852722028545,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527852722028545,"checkpoint_ts":449527852722028545,"checkpoint_time":"2024-05-04 16:51:10.601"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... Create changefeed successfully! 
ID: test-1 Info: {"upstream_id":7365064280634228141,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-04T16:51:15.986125499+08:00","start_ts":449527852821643265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527852821643265,"checkpoint_ts":449527852821643265,"checkpoint_time":"2024-05-04 16:51:10.981"} PASS coverage: 2.5% of statements in github.com/pingcap/tiflow/... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 42-th check, retry later [Sat May 4 16:51:16 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.28702872.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x [Sat May 4 16:51:17 CST 2024] <<<<<< START kafka consumer in partition_table case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/run.sh using Sink-Type: kafka... 
<<================= +++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/run.sh ++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup ++ pwd + CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup + source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_cleanup/../_utils/test_prepare ++ UP_TIDB_HOST=127.0.0.1 ++ UP_TIDB_PORT=4000 ++ UP_TIDB_OTHER_PORT=4001 ++ UP_TIDB_STATUS=10080 ++ UP_TIDB_OTHER_STATUS=10081 ++ DOWN_TIDB_HOST=127.0.0.1 ++ DOWN_TIDB_PORT=3306 ++ DOWN_TIDB_STATUS=20080 ++ TLS_TIDB_HOST=127.0.0.1 ++ TLS_TIDB_PORT=3307 ++ TLS_TIDB_STATUS=30080 ++ UP_PD_HOST_1=127.0.0.1 ++ UP_PD_PORT_1=2379 ++ UP_PD_PEER_PORT_1=2380 ++ UP_PD_HOST_2=127.0.0.1 ++ UP_PD_PORT_2=2679 ++ UP_PD_PEER_PORT_2=2680 ++ UP_PD_HOST_3=127.0.0.1 ++ UP_PD_PORT_3=2779 ++ UP_PD_PEER_PORT_3=2780 ++ DOWN_PD_HOST=127.0.0.1 ++ DOWN_PD_PORT=2479 ++ DOWN_PD_PEER_PORT=2480 ++ TLS_PD_HOST=127.0.0.1 ++ TLS_PD_PORT=2579 ++ TLS_PD_PEER_PORT=2580 ++ UP_TIKV_HOST_1=127.0.0.1 ++ UP_TIKV_PORT_1=20160 ++ UP_TIKV_STATUS_PORT_1=20181 ++ UP_TIKV_HOST_2=127.0.0.1 ++ UP_TIKV_PORT_2=20161 ++ UP_TIKV_STATUS_PORT_2=20182 ++ UP_TIKV_HOST_3=127.0.0.1 ++ UP_TIKV_PORT_3=20162 ++ UP_TIKV_STATUS_PORT_3=20183 ++ DOWN_TIKV_HOST=127.0.0.1 ++ DOWN_TIKV_PORT=21160 ++ DOWN_TIKV_STATUS_PORT=21180 ++ TLS_TIKV_HOST=127.0.0.1 ++ TLS_TIKV_PORT=22160 ++ TLS_TIKV_STATUS_PORT=22180 +++ cat /tmp/tidb_cdc_test/KAFKA_VERSION +++ echo 2.4.1 ++ KAFKA_VERSION=2.4.1 + WORK_DIR=/tmp/tidb_cdc_test/storage_cleanup + CDC_BINARY=cdc.test + SINK_TYPE=kafka + EXIST_FILES=() + CLEANED_FILES=() + trap stop_tidb_cluster EXIT + run kafka + '[' kafka '!=' storage ']' + return + check_logs /tmp/tidb_cdc_test/storage_cleanup ++ date + echo '[Sat May 4 16:51:16 CST 2024] <<<<<< run test case storage_cleanup success! >>>>>>' [Sat May 4 16:51:16 CST 2024] <<<<<< run test case storage_cleanup success! 
>>>>>> + stop_tidb_cluster + set +x ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 5995 0 --:--:-- --:--:-- --:--:-- 6138 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-04 16:51:10.981","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-04 16:51:17.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '16:51:10.981","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '16:51:17.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '16:51:10.981","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '16:51:17.000","info":"Data' syncing is 'finished"}' ++ jq -r .sink_checkpoint_ts + sink_checkpoint_ts='2024-05-04 16:51:10.981' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '16:51:10.981","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '16:51:17.000","info":"Data' syncing is 'finished"}' ++ jq -r .puller_resolved_ts + puller_resolved_ts='1970-01-01 08:00:00.000' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '16:51:10.981","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-04' '16:51:17.000","info":"Data' syncing is 'finished"}' ++ jq -r .last_synced_ts + last_synced_ts='1970-01-01 08:00:00.000' + '[' true '!=' true ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' + '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']' ++ date '+%Y-%m-%d %H:%M:%S' + current='2024-05-04 16:51:17' + echo 'sink_checkpoint_ts is 2024-05-04' 16:51:10.981 sink_checkpoint_ts is 2024-05-04 16:51:10.981 ++ date -d '2024-05-04 16:51:10.981' +%s + checkpoint_timestamp=1714812670 ++ date -d '2024-05-04 16:51:17' +%s + current_timestamp=1714812677 + '[' 7 -gt 300 ']' + run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);' + check_table_exists test.t1 127.0.0.1 3306 table test.t1 not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b33b3140013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:4833, start at 2024-05-04 16:51:17.355015559 +0800 CST m=+5.218017619 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:17.361 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. 
tikv_gc_last_run_time 20240504-16:51:17.317 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:17.317 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 43-th check, retry later split_and_random_merge scale: 40 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 {"id":"d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812676} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccb208c7 d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 /tidb/cdc/default/default/upstream/7365064314634846666 {"id":7365064314634846666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 {"id":"d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812676} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccb208c7 d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 /tidb/cdc/default/default/upstream/7365064314634846666 {"id":7365064314634846666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 {"id":"d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812676} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccb208c7 d6dd487c-59c7-4f4e-b4dc-bf7833dc98f6 /tidb/cdc/default/default/upstream/7365064314634846666 {"id":7365064314634846666,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: 69528d5c-d614-4726-a795-9568c79f917e Info: {"upstream_id":7365064314634846666,"namespace":"default","id":"69528d5c-d614-4726-a795-9568c79f917e","sink_uri":"kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-26929?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:51:19.597800513+08:00","start_ts":449527854199472129,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527854199472129,"checkpoint_ts":449527854199472129,"checkpoint_time":"2024-05-04 16:51:16.237"} [Sat May 4 16:51:19 CST 2024] <<<<<< START kafka consumer in new_ci_collation case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_basic/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:19 CST 2024] <<<<<< run test case csv_storage_basic success! >>>>>> table test.t1 exists + sleep 5 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b33b3140013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:4833, start at 2024-05-04 16:51:17.355015559 +0800 CST m=+5.218017619 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:17.361 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. 
tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:17.317 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:17.317 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b33c35c000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-f9n2q-3jlvd, pid:4918, start at 2024-05-04 16:51:18.371034299 +0800 CST m=+6.167068701 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:18.377 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:18.359 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:18.359 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
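The repeated "table <schema>.<table> not exists for N-th check, retry later" lines (ddl_manager.finish_mark, test.t1 against the downstream at 127.0.0.1:3306, new_ci_collation_test.t1, and so on) come from a polling helper that re-checks the downstream until the table has been replicated. A sketch of that pattern, assuming an information_schema probe and a fixed retry budget; the helper name below is illustrative and not the harness's check_table_exists itself.

# Sketch of the downstream table polling seen throughout this log.
# Host/port default to the downstream TiDB used above; the probe against
# information_schema and the 60-try budget are assumptions.
wait_for_table() {
    local schema=$1 table=$2 host=${3:-127.0.0.1} port=${4:-3306}
    local i
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -Nse \
               "SELECT COUNT(*) FROM information_schema.tables
                WHERE table_schema='$schema' AND table_name='$table';" \
               2>/dev/null | grep -qx 1; then
            echo "table $schema.$table exists"
            return 0
        fi
        echo "table $schema.$table not exists for $i-th check, retry later"
        sleep 2
    done
    return 1
}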
Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 44-th check, retry later table new_ci_collation_test.t1 not exists for 1-th check, retry later check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 normal + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2218 0 --:--:-- --:--:-- --:--:-- 2238 + info='{"state":"warning","resolved_ts":449527855232057351,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449527855232057351,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449527855232057351,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449527855232057351,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 3-th time, retry later [Sat May 4 16:51:22 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.63166318.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table ddl_manager.finish_mark not exists for 45-th check, retry later table new_ci_collation_test.t1 exists table new_ci_collation_test.t2 not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_multi_tables_ddl/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:23 CST 2024] <<<<<< run test case csv_storage_multi_tables_ddl success! >>>>>> table ddl_manager.finish_mark not exists for 46-th check, retry later table new_ci_collation_test.t2 exists table new_ci_collation_test.t3 not exists for 1-th check, retry later table partition_table.t exists table partition_table.t1 exists table partition_table.t2 not exists for 1-th check, retry later *************************** 1. 
row *************************** count(distinct region_id): 1 ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 243 100 243 0 0 2377 0 --:--:-- --:--:-- --:--:-- 2382 + synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-04 16:51:23.380","puller_resolved_ts":"2024-05-04 16:51:17.680","last_synced_ts":"2024-05-04 16:51:18.180","now_ts":"2024-05-04 16:51:25.000","info":"The data syncing is not finished, please wait"}' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '16:51:23.380","puller_resolved_ts":"2024-05-04' '16:51:17.680","last_synced_ts":"2024-05-04' '16:51:18.180","now_ts":"2024-05-04' '16:51:25.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq .synced + status=false + '[' false '!=' false ']' ++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-04' '16:51:23.380","puller_resolved_ts":"2024-05-04' '16:51:17.680","last_synced_ts":"2024-05-04' '16:51:18.180","now_ts":"2024-05-04' '16:51:25.000","info":"The' data syncing is not finished, please 'wait"}' ++ jq -r .info + info='The data syncing is not finished, please wait' + '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']' + sleep 130 + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:25 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 {"id":"d622c1c4-f6b2-4ed9-a9a9-960d1d76e040","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812682} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccc28fd0 d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 /tidb/cdc/default/default/upstream/7365064325997101651 {"id":7365064325997101651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 {"id":"d622c1c4-f6b2-4ed9-a9a9-960d1d76e040","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812682} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccc28fd0 d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 /tidb/cdc/default/default/upstream/7365064325997101651 
{"id":7365064325997101651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 {"id":"d622c1c4-f6b2-4ed9-a9a9-960d1d76e040","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812682} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ccc28fd0 d622c1c4-f6b2-4ed9-a9a9-960d1d76e040 /tidb/cdc/default/default/upstream/7365064325997101651 {"id":7365064325997101651,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [Sat May 4 16:51:25 CST 2024] <<<<<< START kafka consumer in changefeed_pause_resume case >>>>>> table changefeed_pause_resume.t1 not exists for 1-th check, retry later table ddl_manager.finish_mark not exists for 47-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_sequence/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/csv_storage_partition_table/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:26 CST 2024] <<<<<< run test case csv_storage_partition_table success! >>>>>> check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 normal + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 244 100 244 0 0 2245 0 --:--:-- --:--:-- --:--:-- 2259 + info='{"state":"warning","resolved_ts":449527856818028545,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' + echo '{"state":"warning","resolved_ts":449527856818028545,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}' {"state":"warning","resolved_ts":449527856818028545,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}} ++ echo '{"state":"warning","resolved_ts":449527856818028545,"checkpoint_ts":449527851509612573,"last_warning":{"time":"2024-05-04T16:51:09.801137114+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}' ++ jq -r .state + state=warning + [[ ! 
warning == \n\o\r\m\a\l ]] + echo 'changefeed state warning does not equal to normal' changefeed state warning does not equal to normal + exit 1 run task failed 4-th time, retry later table new_ci_collation_test.t3 exists table new_ci_collation_test.t4 not exists for 1-th check, retry later table partition_table.t2 not exists for 2-th check, retry later table changefeed_pause_resume.t1 not exists for 2-th check, retry later table ddl_manager.finish_mark not exists for 48-th check, retry later table new_ci_collation_test.t4 exists table new_ci_collation_test.t5 not exists for 1-th check, retry later table partition_table.t2 not exists for 3-th check, retry later split_and_random_merge scale: 80 \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir table changefeed_pause_resume.t1 exists table changefeed_pause_resume.t2 exists table changefeed_pause_resume.t3 not exists for 1-th check, retry later [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage start tidb cluster in /tmp/tidb_cdc_test/ddl_sequence Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [Pipeline] } [Pipeline] // container [Pipeline] } table ddl_manager.finish_mark not exists for 49-th check, retry later table partition_table.t2 not exists for 4-th check, retry later [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } table new_ci_collation_test.t5 exists check diff failed 1-th time, retry later table changefeed_pause_resume.t3 exists table ddl_manager.finish_mark not exists for 50-th check, retry later check diff failed 2-th time, retry later table partition_table.t2 exists table partition_table.finish_mark not exists for 1-th check, retry later check diff failed 1-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check_changefeed_status 127.0.0.1:8300 cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 normal + endpoint=127.0.0.1:8300 + changefeed_id=cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3 + expected_state=normal + field= + error_pattern= ++ curl 127.0.0.1:8300/api/v2/changefeeds/cb7cf5b3-6915-49ec-a51d-0658e1a9cfa3/status % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 86 100 86 0 0 775 0 --:--:-- --:--:-- --:--:-- 781 + info='{"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546}' + echo '{"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546}' {"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546} ++ echo '{"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] + [[ -z '' ]] ++ echo '{"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546}' ++ jq -r .last_error + error_msg=null + [[ ! null == \n\u\l\l ]] ++ echo '{"state":"normal","resolved_ts":449527858915180546,"checkpoint_ts":449527858915180546}' ++ jq -r .last_warning + error_msg=null + [[ ! null == \n\u\l\l ]] + exit 0 run task successfully table kafka_sink_error_resume.t1 exists table kafka_sink_error_resume.t2 exists table ddl_manager.finish_mark not exists for 51-th check, retry later check diff failed 3-th time, retry later table partition_table.finish_mark not exists for 2-th check, retry later check diff failed 1-th time, retry later check diff failed 2-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) *************************** 1. row *************************** count(distinct region_id): 1 table ddl_manager.finish_mark not exists for 52-th check, retry later table partition_table.finish_mark not exists for 3-th check, retry later check diff failed 4-th time, retry later check diff successfully check diff failed 1-th time, retry later check diff failed 3-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 53-th check, retry later table partition_table.finish_mark not exists for 4-th check, retry later check diff failed 5-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... 
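The check_changefeed_status calls above (first expecting state 'warning' with a last_warning matching 'kafka', later expecting 'normal') all follow one shape: fetch /api/v2/changefeeds/<id>/status, compare .state with the expected state, and, when a field and pattern are supplied, require that field's message to match the pattern; on mismatch the task exits non-zero and the caller retries, which is where the "run task failed N-th time, retry later" lines come from. A condensed bash sketch of that check; the function name below is a stand-in, not the harness's exact implementation.

# Condensed sketch of the changefeed-state check traced above.  The
# positional arguments mirror those visible in the log; the wrapper name
# is illustrative.
check_changefeed_state() {
    local endpoint=$1 changefeed_id=$2 expected_state=$3 field=$4 pattern=$5
    local info state msg
    info=$(curl -s "$endpoint/api/v2/changefeeds/$changefeed_id/status")
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        return 1
    fi
    if [[ -n "$field" ]]; then
        msg=$(echo "$info" | jq -r ".${field}.message")
        if [[ ! "$msg" =~ $pattern ]]; then
            echo "$field message '$msg' does not match $pattern"
            return 1
        fi
    fi
    return 0
}

Invoked as, for example, check_changefeed_state 127.0.0.1:8300 <changefeed-id> warning last_warning kafka, matching the argument list printed at the top of each check above.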
check diff failed 4-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 54-th check, retry later wait process cdc.test exit for 2-th time... table partition_table.finish_mark not exists for 5-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:51:41 CST 2024] <<<<<< run test case kafka_sink_error_resume success! >>>>>> table region_merge.t1 exists check diff failed 1-th time, retry later check diff successfully check diff failed 1-th time, retry later check diff failed 5-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 55-th check, retry later check diff successfully table partition_table.finish_mark not exists for 6-th check, retry later wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... check diff successfully check diff successfully wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b353fcc0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:8354, start at 2024-05-04 16:51:42.741021258 +0800 CST m=+6.516076014 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:42.748 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:42.758 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:42.758 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b353fcc0012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:8354, start at 2024-05-04 16:51:42.741021258 +0800 CST m=+6.516076014 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:42.748 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:42.758 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:42.758 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b353fe40014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:8438, start at 2024-05-04 16:51:42.742573683 +0800 CST m=+6.464004095 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:53:42.750 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:51:42.713 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:41:42.713 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
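The VARIABLE_NAME / VARIABLE_VALUE rows printed while verifying each TiDB instance are the bootstrap and GC metadata that TiDB keeps in its mysql.tidb system table; for CDC the relevant ones are tikv_gc_life_time and tikv_gc_safe_point, since a changefeed's checkpoint has to stay ahead of the GC safe point. A quick, illustrative way to pull the same rows from a running instance is sketched below; the host and port are placeholders, not the ports used by this job.

# Illustrative query for the GC metadata shown in the log above.
# 127.0.0.1:4000 is a placeholder for whichever TiDB instance is being checked.
mysql -h 127.0.0.1 -P 4000 -u root -e \
  "SELECT VARIABLE_NAME, VARIABLE_VALUE, \`COMMENT\` FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%'"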
Logging trace to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:51:44 CST 2024] <<<<<< run test case region_merge success! >>>>>> wait process cdc.test exit for 2-th time... table ddl_manager.finish_mark not exists for 56-th check, retry later table partition_table.finish_mark not exists for 7-th check, retry later cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:51:45 CST 2024] <<<<<< run test case new_ci_collation success! >>>>>> check diff failed 1-th time, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9771.out cli tso query --pd=http://127.0.0.1:2379 table ddl_manager.finish_mark not exists for 57-th check, retry later table partition_table.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... check diff failed 2-th time, retry later wait process cdc.test exit for 2-th time... + set +x + tso='449527862252797954 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527862252797954 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:51:48 CST 2024] <<<<<< START cdc server in ddl_sequence case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.98099811.out server --log-file /tmp/tidb_cdc_test/ddl_sequence/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_sequence/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:51:48 CST 2024] <<<<<< run test case partition_table success! >>>>>> table ddl_manager.finish_mark not exists for 58-th check, retry later check diff failed 3-th time, retry later table ddl_manager.finish_mark not exists for 59-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:51:51 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fdeb385-f3bf-413a-bb59-ba07f5075541 {"id":"2fdeb385-f3bf-413a-bb59-ba07f5075541","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd2079d3 2fdeb385-f3bf-413a-bb59-ba07f5075541 /tidb/cdc/default/default/upstream/7365064435768353445 {"id":7365064435768353445,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fdeb385-f3bf-413a-bb59-ba07f5075541 {"id":"2fdeb385-f3bf-413a-bb59-ba07f5075541","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd2079d3 2fdeb385-f3bf-413a-bb59-ba07f5075541 /tidb/cdc/default/default/upstream/7365064435768353445 {"id":7365064435768353445,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/2fdeb385-f3bf-413a-bb59-ba07f5075541 {"id":"2fdeb385-f3bf-413a-bb59-ba07f5075541","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812708} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd2079d3 2fdeb385-f3bf-413a-bb59-ba07f5075541 /tidb/cdc/default/default/upstream/7365064435768353445 
{"id":7365064435768353445,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9869.out cli changefeed create --start-ts=449527862252797954 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-21752?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: d0f3b0b1-5071-454e-9635-803d34f897fc Info: {"upstream_id":7365064435768353445,"namespace":"default","id":"d0f3b0b1-5071-454e-9635-803d34f897fc","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-21752?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:51:52.094759538+08:00","start_ts":449527862252797954,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527862252797954,"checkpoint_ts":449527862252797954,"checkpoint_time":"2024-05-04 16:51:46.958"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 4-th time, retry later table ddl_manager.finish_mark not exists for 60-th check, retry later + set +x [Sat May 4 16:51:53 CST 2024] <<<<<< START kafka consumer in ddl_sequence case >>>>>> check diff failed 5-th time, retry later table ddl_manager.finish_mark not exists for 61-th check, retry later table ddl_sequence.finish_mark not exists for 1-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_lost_callback/run.sh using Sink-Type: kafka... <<================= [Sat May 4 16:51:55 CST 2024] <<<<<< run test case mq_sink_lost_callback success! 
>>>>>> check diff failed 6-th time, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/common_1/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table ddl_manager.finish_mark not exists for 62-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/batch_add_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table ddl_sequence.finish_mark not exists for 2-th check, retry later check diff successfully table ddl_manager.finish_mark not exists for 63-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... start tidb cluster in /tmp/tidb_cdc_test/common_1 Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/batch_add_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... table ddl_sequence.finish_mark not exists for 3-th check, retry later check diff failed 1-th time, retry later \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } table ddl_manager.finish_mark not exists for 64-th check, retry later Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } Verifying downstream PD is started... [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_sequence.finish_mark not exists for 4-th check, retry later check diff failed 2-th time, retry later table ddl_manager.finish_mark not exists for 65-th check, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table ddl_sequence.finish_mark not exists for 5-th check, retry later check diff failed 3-th time, retry later Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_manager.finish_mark not exists for 66-th check, retry later table ddl_sequence.finish_mark exists check diff successfully ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 4-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 1-th time... table ddl_manager.finish_mark not exists for 67-th check, retry later wait process cdc.test exit for 2-th time... 
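The repeated "ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)" lines are expected noise: they come from the harness polling the freshly started upstream and downstream TiDB servers until they accept connections. A minimal sketch of such a wait loop is below; the port, user, and retry budget are placeholders rather than the harness's actual values.

# Sketch: block until a TiDB (MySQL-protocol) server accepts connections.
wait_for_tidb() {
  local host=$1 port=$2 retries=${3:-60}
  for ((i = 1; i <= retries; i++)); do
    if mysql -h "$host" -P "$port" -u root -e 'SELECT 1' >/dev/null 2>&1; then
      echo "tidb at ${host}:${port} is up"
      return 0
    fi
    echo "tidb at ${host}:${port} not ready (${i}/${retries}), retry later"
    sleep 1
  done
  return 1
}

wait_for_tidb 127.0.0.1 4000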
start tidb cluster in /tmp/tidb_cdc_test/mq_sink_dispatcher Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:52:07 CST 2024] <<<<<< run test case ddl_sequence success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 5-th time, retry later [2024/05/04 16:52:01.613 +08:00] [INFO] [main.go:99] ["running ddl test: 1 modifyColumnDefaultValueDDL2"] [2024/05/04 16:52:01.613 +08:00] [INFO] [main.go:99] ["running ddl test: 0 modifyColumnDefaultValueDDL1"] [2024/05/04 16:52:02.135 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa33c4e13_4f97_4c4a_bebf_87f539c847e1"] [2024/05/04 16:52:02.143 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsaa8d48df_242d_45a0_a595_3f7acb223c49"] [2024/05/04 16:52:02.144 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs81f82b6f_ccb9_485d_acba_4cc4d8a7c10f"] [2024/05/04 16:52:02.146 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsee67a0c4_0e73_4a71_bdd3_1b2469082a43"] [2024/05/04 16:52:02.147 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1adb2a26_04c5_4709_941b_eefbf418b76a"] [2024/05/04 16:52:02.148 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs735f6d04_215c_4b46_aadd_45f3b4826b2f"] [2024/05/04 16:52:02.150 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs5f7cc815_fa10_495a_bcaa_24b6e2192b09"] [2024/05/04 16:52:02.151 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsedc42865_2e78_48dc_aa2e_759f81b86eb2"] [2024/05/04 16:52:02.200 +08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/05/04 16:52:02.200 +08:00] [INFO] [main.go:178] ["1 insert success: 100"] [2024/05/04 16:52:02.290 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/05/04 16:52:02.293 +08:00] [INFO] [main.go:178] ["0 insert success: 100"] [2024/05/04 16:52:02.715 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.724 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.732 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.732 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.734 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.735 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.736 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.736 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.737 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.739 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.739 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.740 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.741 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.741 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:02.743 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] 
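For the ddl_sequence case traced earlier in this span, the harness starts the cdc server, probes http://127.0.0.1:8300/debug/info with basic auth until the response contains "etcd info" (up to 50 attempts, 3 seconds apart), and only then creates the Kafka changefeed. A condensed sketch of that wait-then-create sequence follows; the retry budget and sink URI are copied from the trace, while the start-ts and topic name are values specific to this run.

#!/usr/bin/env bash
# Sketch: wait for the TiCDC HTTP endpoint to report etcd info, then create a Kafka changefeed.
set -euo pipefail

for i in $(seq 0 50); do
  res=$(curl -sL --max-time 20 --user ticdc:ticdc_secret http://127.0.0.1:8300/debug/info || true)
  if echo "$res" | grep -q 'etcd info'; then
    break
  fi
  if [[ "$i" -eq 50 ]]; then
    echo "cdc server did not become ready" >&2
    exit 1
  fi
  sleep 3
done

cdc cli changefeed create \
  --pd=http://127.0.0.1:2379 \
  --start-ts=449527862252797954 \
  --sink-uri='kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-21752?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'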
[2024/05/04 16:52:02.748 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:02.769 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/05/04 16:52:02.773 +08:00] [INFO] [main.go:178] ["1 insert success: 200"] [2024/05/04 16:52:03.037 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/05/04 16:52:03.040 +08:00] [INFO] [main.go:178] ["0 insert success: 200"] [2024/05/04 16:52:03.042 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/05/04 16:52:03.044 +08:00] [INFO] [main.go:199] ["0 delete success: 100"] [2024/05/04 16:52:03.319 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.324 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.329 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.333 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.334 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.334 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.335 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.337 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.338 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.339 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.339 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.340 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.340 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.342 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.343 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:03.347 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:03.367 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/05/04 16:52:03.371 +08:00] [INFO] [main.go:178] ["1 insert success: 300"] [2024/05/04 16:52:03.732 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/05/04 16:52:03.733 +08:00] [INFO] [main.go:178] ["0 insert success: 300"] [2024/05/04 16:52:03.916 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.928 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.930 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.931 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.933 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.934 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.937 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.940 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.942 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.945 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:03.953 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.954 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.955 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.956 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.957 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:03.962 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:04.025 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] 
[2024/05/04 16:52:04.028 +08:00] [INFO] [main.go:178] ["1 insert success: 400"] [2024/05/04 16:52:04.528 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.543 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/05/04 16:52:04.543 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.544 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.544 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.545 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.545 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.546 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.548 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/05/04 16:52:04.550 +08:00] [INFO] [main.go:178] ["0 insert success: 400"] [2024/05/04 16:52:04.551 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.555 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.555 +08:00] [INFO] [main.go:199] ["0 delete success: 200"] [2024/05/04 16:52:04.558 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.558 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.558 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.561 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.612 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:04.613 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.616 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:04.649 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/05/04 16:52:04.653 +08:00] [INFO] [main.go:178] ["1 insert success: 500"] [2024/05/04 16:52:05.050 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.115 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.119 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.124 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.128 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.128 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.129 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.133 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.136 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.231 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.232 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.233 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.237 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.237 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:05.239 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.239 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:05.263 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/05/04 16:52:05.315 +08:00] [INFO] [main.go:178] ["1 insert success: 600"] [2024/05/04 16:52:05.319 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/05/04 16:52:05.367 +08:00] [INFO] [main.go:178] ["0 insert success: 500"] [2024/05/04 16:52:05.649 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] 
[2024/05/04 16:52:05.717 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.721 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.727 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.728 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.731 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.733 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.735 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.740 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.812 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.812 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.813 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.813 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.813 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.814 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:05.821 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:05.860 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/05/04 16:52:05.868 +08:00] [INFO] [main.go:178] ["1 insert success: 700"] [2024/05/04 16:52:06.113 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/05/04 16:52:06.118 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/05/04 16:52:06.140 +08:00] [INFO] [main.go:178] ["0 insert success: 600"] [2024/05/04 16:52:06.145 +08:00] [INFO] [main.go:199] ["0 delete success: 300"] [2024/05/04 16:52:06.154 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.240 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.243 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.245 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.249 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.254 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.255 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.315 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.315 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.355 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.356 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.357 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.359 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.359 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.359 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:06.371 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:06.440 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/05/04 16:52:06.440 +08:00] [INFO] [main.go:178] ["1 insert success: 800"] [2024/05/04 16:52:06.715 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.822 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.824 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.829 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.833 +08:00] [INFO] [main.go:178] ["72 insert success: 
800"] [2024/05/04 16:52:06.839 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.845 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.847 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] [2024/05/04 16:52:06.916 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.916 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:06.931 +08:00] [INFO] [main.go:178] ["0 insert success: 700"] [2024/05/04 16:52:06.935 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.940 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.941 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.944 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.944 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:06.944 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:07.012 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:07.028 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/05/04 16:52:07.028 +08:00] [INFO] [main.go:178] ["1 insert success: 900"] [2024/05/04 16:52:07.240 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.423 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.427 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.429 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.431 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.439 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.445 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.523 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.527 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.549 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.553 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.555 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.555 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.557 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.561 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:07.619 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:07.638 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/05/04 16:52:07.640 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"] [2024/05/04 16:52:07.640 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/05/04 16:52:07.646 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/05/04 16:52:07.724 +08:00] [INFO] [main.go:178] ["0 insert success: 800"] [2024/05/04 16:52:07.729 +08:00] [INFO] [main.go:199] ["0 delete success: 400"] [2024/05/04 16:52:07.817 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:07.946 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.018 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.019 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.024 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.034 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.039 +08:00] [INFO] [main.go:178] ["72 insert 
success: 1000"] [2024/05/04 16:52:08.124 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.130 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.148 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.158 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.160 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.213 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.219 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.219 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:08.244 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:08.253 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/05/04 16:52:08.258 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"] [2024/05/04 16:52:08.426 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.537 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] [2024/05/04 16:52:08.552 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.620 +08:00] [INFO] [main.go:178] ["0 insert success: 900"] [2024/05/04 16:52:08.637 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.644 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.648 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.656 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.731 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.732 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.745 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.756 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.760 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.812 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.819 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.824 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:08.844 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:08.852 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] [2024/05/04 16:52:08.855 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"] table ddl_manager.finish_mark not exists for 68-th check, retry later [2024/05/04 16:52:08.956 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.147 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] table test.finish_mark not exists for 1-th check, retry later table test.finish_mark not exists for 2-th check, retry later table test.finish_mark not exists for 3-th check, retry later [2024/05/04 16:52:09.317 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.319 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.325 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.325 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.338 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.413 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.418 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] 
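The "table <db>.finish_mark not exists for N-th check, retry later" lines are the harness's completion signal: each test writes a finish_mark table upstream and then polls the downstream until that table has replicated through the changefeed. A standalone sketch of such a poll is below; the downstream host, port, and retry budget are placeholders.

# Sketch: poll the downstream until a marker table has been replicated.
check_table_exists() {
  local table=$1 host=$2 port=$3 retries=${4:-60}
  for ((i = 1; i <= retries; i++)); do
    if mysql -h "$host" -P "$port" -u root -e "DESC ${table}" >/dev/null 2>&1; then
      echo "table ${table} exists"
      return 0
    fi
    echo "table ${table} not exists for ${i}-th check, retry later"
    sleep 2
  done
  return 1
}

check_table_exists ddl_manager.finish_mark 127.0.0.1 3306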
[2024/05/04 16:52:09.426 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/05/04 16:52:09.430 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/05/04 16:52:09.435 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.441 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.452 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:09.457 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"] [2024/05/04 16:52:09.458 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.461 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.462 +08:00] [INFO] [main.go:199] ["0 delete success: 500"] [2024/05/04 16:52:09.517 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:09.531 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:09.537 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] [2024/05/04 16:52:09.545 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"] [2024/05/04 16:52:09.615 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:09.738 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:09.846 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.848 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.912 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.913 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.925 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.947 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:09.958 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:10.018 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.032 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.040 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.048 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.061 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.117 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:10.130 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] [2024/05/04 16:52:10.131 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:10.142 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"] [2024/05/04 16:52:10.158 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.161 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/05/04 16:52:10.243 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"] [2024/05/04 16:52:10.313 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.431 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.439 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:10.517 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.522 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.531 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.554 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.611 +08:00] [INFO] [main.go:178] ["73 
insert success: 1400"] [2024/05/04 16:52:10.617 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.621 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.633 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.639 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.655 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.663 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:10.723 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"] [2024/05/04 16:52:10.737 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"] [2024/05/04 16:52:10.745 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:10.754 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:10.839 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] table ddl_manager.finish_mark not exists for 69-th check, retry later [2024/05/04 16:52:11.030 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.045 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.046 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"] [2024/05/04 16:52:11.114 +08:00] [INFO] [main.go:199] ["0 delete success: 600"] [2024/05/04 16:52:11.136 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.144 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"] [2024/05/04 16:52:11.146 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.148 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.148 +08:00] [INFO] [main.go:199] ["0 delete success: 600"] [2024/05/04 16:52:11.229 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:11.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.237 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:11.253 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:11.254 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.259 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:11.327 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:11.334 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:11.348 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"] [2024/05/04 16:52:11.361 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"] [2024/05/04 16:52:11.418 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.427 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:11.511 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.638 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.652 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.741 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release table test.finish_mark not exists for 4-th check, retry later [2024/05/04 16:52:11.750 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.816 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.840 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.844 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.845 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.862 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.865 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:11.867 +08:00] [INFO] [main.go:178] ["0 insert success: 1300"] [2024/05/04 16:52:11.870 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.917 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.934 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:11.946 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"] [2024/05/04 16:52:11.960 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] check diff successfully [2024/05/04 16:52:12.137 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.146 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.225 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.251 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.259 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:12.261 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:12.314 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:12.314 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.319 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:12.375 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:12.387 +08:00] [INFO] [main.go:178] ["1 insert success: 1800"] [2024/05/04 16:52:12.391 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:12.401 +08:00] [INFO] [main.go:178] ["0 insert success: 1400"] [2024/05/04 16:52:12.404 +08:00] [INFO] [main.go:199] ["0 delete success: 700"] [2024/05/04 16:52:12.476 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 16:52:12.485 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. 
new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36e4180019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:4412, start at 2024-05-04 16:52:09.647121265 +0800 CST m=+5.177684567 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:09.656 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:09.655 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:09.655 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36e4180019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:4412, start at 2024-05-04 16:52:09.647121265 +0800 CST m=+5.177684567 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:09.656 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:09.655 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:09.655 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36e4a80014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:4491, start at 2024-05-04 16:52:09.664473848 +0800 CST m=+5.145045839 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:09.672 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:09.642 +0800 The time when last GC starts. 
(DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:09.642 +0800 All versions after safe point can be accessed. (DO NOT EDIT) [2024/05/04 16:52:12.564 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:12.616 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:12.675 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:12.688 +08:00] [INFO] [main.go:178] ["1 insert success: 1900"] [2024/05/04 16:52:12.694 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/error.log arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table ddl_manager.finish_mark not exists for 70-th check, retry later [2024/05/04 16:52:12.791 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 16:52:12.801 +08:00] [INFO] [main.go:178] ["0 insert success: 1500"] [2024/05/04 16:52:12.802 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 16:52:12.826 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd8358d2a_62da_4cfa_bae4_b1d8bff55bec"] [2024/05/04 16:52:12.838 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs6e3015d4_3277_4e0c_8d37_f609fc6c03c9"] [2024/05/04 16:52:12.848 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs89de783b_61cc_49d9_9141_a51d9e558099"] [2024/05/04 16:52:12.878 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 16:52:12.927 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] 
[2024/05/04 16:52:12.927 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs14765fa9_1ffe_4435_826b_dae05e3ed30b"] [2024/05/04 16:52:13.017 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 16:52:13.037 +08:00] [INFO] [main.go:178] ["1 insert success: 2000"] [2024/05/04 16:52:13.046 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 16:52:13.249 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36f2a00012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:7053, start at 2024-05-04 16:52:10.570722579 +0800 CST m=+5.696898782 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:10.578 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:10.536 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:10.536 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36f2a00012 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:7053, start at 2024-05-04 16:52:10.570722579 +0800 CST m=+5.696898782 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:10.578 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:10.536 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:10.536 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b36f4180014 Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-k8pxt-9vhj7, pid:7130, start at 2024-05-04 16:52:10.65654519 +0800 CST m=+5.725083418 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:10.664 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:10.630 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:10.630 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/common_1/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/common_1/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/common_1/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } check diff failed 1-th time, retry later [2024/05/04 16:52:13.317 +08:00] [INFO] [main.go:178] ["72 insert success: 2000"] [2024/05/04 16:52:13.334 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:13.338 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:13.341 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:13.349 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:13.356 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 
16:52:13.359 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:13.376 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs0dd6bd05_3901_4c0a_bc37_2d6f76ed4a85"] [2024/05/04 16:52:13.397 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:13.400 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 5-th check, retry later [2024/05/04 16:52:13.655 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:13.656 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:13.659 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:13.668 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:13.673 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:13.674 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:13.695 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:13.697 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:13.712 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:13.716 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] check_changefeed_state http://127.0.0.1:2379 45d0066c-38ee-4f65-9eb7-f9c01f7beb56 finished null + endpoints=http://127.0.0.1:2379 + changefeed_id=45d0066c-38ee-4f65-9eb7-f9c01f7beb56 + expected_state=finished + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 45d0066c-38ee-4f65-9eb7-f9c01f7beb56 -s + info='{ "upstream_id": 7365064094549801561, "namespace": "default", "id": "45d0066c-38ee-4f65-9eb7-f9c01f7beb56", "state": "finished", "checkpoint_tso": 449527866234765315, "checkpoint_time": "2024-05-04 16:52:02.148", "error": null }' + echo '{ "upstream_id": 7365064094549801561, "namespace": "default", "id": "45d0066c-38ee-4f65-9eb7-f9c01f7beb56", "state": "finished", "checkpoint_tso": 449527866234765315, "checkpoint_time": "2024-05-04 16:52:02.148", "error": null }' { "upstream_id": 7365064094549801561, "namespace": "default", "id": "45d0066c-38ee-4f65-9eb7-f9c01f7beb56", "state": "finished", "checkpoint_tso": 449527866234765315, "checkpoint_time": "2024-05-04 16:52:02.148", "error": null } ++ echo '{' '"upstream_id":' 7365064094549801561, '"namespace":' '"default",' '"id":' '"45d0066c-38ee-4f65-9eb7-f9c01f7beb56",' '"state":' '"finished",' '"checkpoint_tso":' 449527866234765315, '"checkpoint_time":' '"2024-05-04' '16:52:02.148",' '"error":' null '}' ++ jq -r .state + state=finished + [[ ! 
finished == \f\i\n\i\s\h\e\d ]] ++ echo '{' '"upstream_id":' 7365064094549801561, '"namespace":' '"default",' '"id":' '"45d0066c-38ee-4f65-9eb7-f9c01f7beb56",' '"state":' '"finished",' '"checkpoint_tso":' 449527866234765315, '"checkpoint_time":' '"2024-05-04' '16:52:02.148",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! null =~ null ]] run task successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:52:08 CST 2024] <<<<<< run test case changefeed_finish success! >>>>>> [2024/05/04 16:52:13.829 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsfbe0f088_ad80_4e82_b4ba_13e4df170cad"] [2024/05/04 16:52:13.841 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs02f617e0_4b19_46d1_b98d_9a58cccb844f"] [2024/05/04 16:52:13.867 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLse16f01a4_fd08_45fd_b279_c0fd818a3c41"] [2024/05/04 16:52:13.869 +08:00] [INFO] [main.go:88] ["testGetDefaultValue take 12.256314668s"] [2024/05/04 16:52:14.029 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:14.032 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:14.041 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:14.048 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:14.050 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:14.056 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:14.122 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:14.133 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:14.142 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:14.154 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:14.264 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:14.265 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:14.324 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:14.326 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:14.344 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:14.348 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:14.455 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:14.455 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:14.462 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:14.514 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:14.519 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:14.522 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:14.556 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:14.566 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:14.574 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:14.611 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:14.714 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:14.721 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:14.733 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:14.733 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 
16:52:14.751 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:14.759 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:14.856 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:14.856 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:14.932 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:14.936 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:14.940 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:14.940 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:15.021 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:15.033 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:15.038 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:15.050 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:15.142 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:15.151 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:15.159 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:15.164 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:15.221 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:15.227 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:15.319 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:15.319 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:15.341 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:15.343 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:15.346 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] table ddl_manager.finish_mark not exists for 71-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 6-th check, retry later [2024/05/04 16:52:15.360 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:15.442 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:15.448 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:15.461 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:15.463 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:15.586 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:15.588 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:15.613 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:15.619 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:15.641 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:15.645 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:15.725 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:15.726 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:15.747 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:15.747 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:15.748 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:15.768 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:15.840 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:15.846 +08:00] [INFO] 
[main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:15.856 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:15.857 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] check diff failed 2-th time, retry later [2024/05/04 16:52:16.022 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:16.031 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:16.033 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:16.041 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:16.068 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:16.068 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:16.161 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:16.161 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:16.228 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:16.229 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:16.231 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:16.237 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:16.273 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:16.317 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:16.324 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:16.327 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:16.439 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:16.448 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:16.458 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:16.513 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:16.528 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:16.538 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:16.617 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:16.621 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:16.661 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:16.666 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:16.667 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:16.670 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:16.731 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:16.751 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:16.762 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:16.770 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:16.846 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:16.854 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:16.925 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:16.950 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:16.956 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:16.970 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:17.059 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:17.113 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] table ddl_manager.finish_mark exists check diff successfully 
[2024/05/04 16:52:17.160 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:17.168 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:17.213 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:17.214 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:17.248 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:17.265 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:17.280 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:17.281 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:17.338 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:17.344 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:17.382 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.finish_mark not exists for 7-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.cli.8541.out cli tso query --pd=http://127.0.0.1:2379 [2024/05/04 16:52:17.427 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:17.428 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:17.436 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:17.520 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:17.534 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:17.615 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] check diff failed 3-th time, retry later wait process cdc.test exit for 1-th time... [2024/05/04 16:52:17.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:17.645 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:17.649 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:17.714 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:17.730 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:17.751 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:17.753 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:17.774 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:17.776 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:17.843 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:17.848 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:17.858 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:17.869 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] wait process cdc.test exit for 2-th time... 
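The check_changefeed_state trace above queries the changefeed through the cdc CLI and inspects the returned JSON with jq: the .state field must equal the expected state ("finished" here) and .error.message must match the expected error ("null"). A condensed sketch of that check, assuming cdc and jq are on PATH (argument and TLS handling in the real script are more involved):

#!/usr/bin/env bash
# Verify that a changefeed reached the expected state and error message.
check_changefeed_state() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ "$state" != "$expected_state" ]]; then
        echo "state ${state} does not match expected ${expected_state}" >&2
        return 1
    fi
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "error message ${message} does not match expected ${expected_error}" >&2
        return 1
    fi
    echo "run task successfully"
}

check_changefeed_state http://127.0.0.1:2379 45d0066c-38ee-4f65-9eb7-f9c01f7beb56 finished null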
[2024/05/04 16:52:17.920 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:17.938 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:18.012 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:18.057 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:18.058 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:18.068 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:18.130 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:18.148 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:18.229 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:18.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:18.237 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:18.244 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:18.319 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:18.342 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:18.354 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:18.358 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:18.362 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:18.375 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:18.445 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:18.524 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:18.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:18.537 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:18.547 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:18.564 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:18.650 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:18.652 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:18.656 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] wait process cdc.test exit for 3-th time... 
[2024/05/04 16:52:18.659 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:18.734 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:18.768 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:18.769 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:18.822 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:18.828 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:18.829 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:18.863 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:18.958 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:19.013 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:19.018 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:19.026 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:19.036 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:19.153 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:19.153 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:19.158 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:19.161 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] wait process cdc.test exit for 4-th time... table test.finish_mark not exists for 8-th check, retry later [Sat May 4 16:52:18 CST 2024] <<<<<< START cdc server in batch_add_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.59605962.out server --log-file /tmp/tidb_cdc_test/batch_add_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/batch_add_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + tso='449527870281744387 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527870281744387 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:52:19 CST 2024] <<<<<< START cdc server in common_1 case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.85828584.out server --log-file /tmp/tidb_cdc_test/common_1/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/common_1/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 16:52:19.249 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:19.274 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:19.319 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:19.336 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:19.338 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:19.340 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:19.365 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:19.462 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:19.463 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] check diff failed 4-th time, retry later [2024/05/04 16:52:19.467 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:19.518 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:19.521 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:19.619 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:19.627 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:19.628 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:19.643 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:19.719 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] wait process cdc.test exit for 5-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3770180014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:12468, start at 2024-05-04 16:52:18.603296325 +0800 CST m=+5.245238388 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:18.609 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:18.566 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:18.566 +0800 All versions after safe point can be accessed. 
(DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:19.756 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:19.758 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:19.820 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:19.823 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:19.825 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:19.845 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:19.928 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:19.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:19.956 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:19.963 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:20.051 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:20.056 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:20.067 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:20.117 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:20.139 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:20.214 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:20.240 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:20.256 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:20.258 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] wait process cdc.test exit for 6-th time... [2024/05/04 16:52:20.263 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:20.351 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:20.359 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:20.373 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:20.415 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:20.461 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:20.464 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:20.482 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:20.532 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:20.552 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:20.626 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:20.641 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:20.661 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:20.665 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:20.665 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] wait process cdc.test exit for 7-th time... [2024/05/04 16:52:20.841 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:20.856 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 16:52:20.860 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:20.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] [2024/05/04 16:52:20.965 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:20.966 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] wait process cdc.test exit for 8-th time... 
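Each "<<<<<< START cdc server ... >>>>>>" block launches cdc.test in server mode and then polls http://127.0.0.1:8300/debug/info with basic auth until the response contains "etcd info", retrying up to 50 times with a 3-second sleep; the "Connection refused" attempts above are the early iterations of that loop. A stripped-down sketch of the readiness probe, with the endpoint and credentials taken from the trace (the full script also wires up coverage output and GO_FAILPOINTS):

#!/usr/bin/env bash
# Poll the TiCDC debug endpoint until the server reports its etcd metadata.
wait_cdc_server_ready() {
    local url="http://127.0.0.1:8300/debug/info"
    local res
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>&1 || true)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server reported an error, retrying" >&2
        elif echo "$res" | grep -q 'etcd info'; then
            echo "cdc server is ready"
            return 0
        fi
        if ((i == 50)); then
            echo "cdc server did not become ready in time" >&2
            return 1
        fi
        sleep 3
    done
}

wait_cdc_server_ready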
[2024/05/04 16:52:21.120 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:21.145 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:21.327 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:21.341 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:21.342 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] wait process cdc.test exit for 9-th time... [2024/05/04 16:52:21.615 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:21.664 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:21.745 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:21.745 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3770180014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:12468, start at 2024-05-04 16:52:18.603296325 +0800 CST m=+5.245238388 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:18.609 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:18.566 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:18.566 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b377a440015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:12551, start at 2024-05-04 16:52:19.25063503 +0800 CST m=+5.837245923 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:19.256 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:19.217 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:19.217 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
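The VARIABLE_NAME / VARIABLE_VALUE / COMMENT tables above (bootstrapped, tidb_server_version, tikv_gc_* and friends) are what the harness prints while verifying that the upstream and downstream TiDB instances have started; the interleaved "ERROR 2003 (HY000): Can't connect to MySQL server" lines are earlier iterations of the same poll hitting a server that is not up yet. A sketch of that verification, assuming the rows come from querying the mysql.tidb system table (an assumption based on the columns shown; the real check may query differently):

#!/usr/bin/env bash
# Poll TiDB until it accepts connections and dumps its bootstrap/GC metadata.
verify_tidb_started() {
    local host=$1 port=$2
    for i in $(seq 1 60); do
        # The exact query is an assumption; mysql.tidb exposes these columns.
        if mysql -h "$host" -P "$port" -u root \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, `COMMENT` FROM mysql.tidb' 2>/dev/null; then
            return 0
        fi
        echo "waiting for TiDB on ${host}:${port}, ${i}-th check"
        sleep 2
    done
    return 1
}

verify_tidb_started 127.0.0.1 4000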
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.finish_mark not exists for 9-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:52:21 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da4183e7-99c7-4443-b895-12d3d618f806 {"id":"da4183e7-99c7-4443-b895-12d3d618f806","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812738} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd94de2c da4183e7-99c7-4443-b895-12d3d618f806 /tidb/cdc/default/default/upstream/7365064554742071905 {"id":7365064554742071905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da4183e7-99c7-4443-b895-12d3d618f806 {"id":"da4183e7-99c7-4443-b895-12d3d618f806","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812738} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd94de2c da4183e7-99c7-4443-b895-12d3d618f806 /tidb/cdc/default/default/upstream/7365064554742071905 {"id":7365064554742071905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/da4183e7-99c7-4443-b895-12d3d618f806 {"id":"da4183e7-99c7-4443-b895-12d3d618f806","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812738} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd94de2c da4183e7-99c7-4443-b895-12d3d618f806 /tidb/cdc/default/default/upstream/7365064554742071905 {"id":7365064554742071905,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.cli.6022.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-batch-add-table-test-21667?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [2024/05/04 16:52:21.925 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs84cf1659_4809_464a_bbdf_0c7f16607437"] [2024/05/04 16:52:21.931 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:21.973 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs473a083f_23fa_40cf_aa8c_c38890f6b79e"] [2024/05/04 16:52:21.975 +08:00] [INFO] 
[main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:22.029 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd63a1252_a0f8_4356_a968_a2213a68a081"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... Create changefeed successfully! ID: c7c8e013-d997-4934-9714-621b3963efa4 Info: {"upstream_id":7365064554742071905,"namespace":"default","id":"c7c8e013-d997-4934-9714-621b3963efa4","sink_uri":"kafka://127.0.0.1:9092/ticdc-batch-add-table-test-21667?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:52:21.889231862+08:00","start_ts":449527871374884867,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527871374884867,"checkpoint_ts":449527871374884867,"checkpoint_time":"2024-05-04 16:52:21.756"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 5-th time, retry later cdc.test: no process found wait process cdc.test exit for 10-th time... process cdc.test already exit [Sat May 4 16:52:22 CST 2024] <<<<<< run test case ddl_manager success! 
>>>>>> [2024/05/04 16:52:22.049 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsf4fbf918_16e6_4987_9491_9c98f36caa5f"] [2024/05/04 16:52:22.057 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa1d9ea35_e69b_4856_a554_004cd18967da"] [2024/05/04 16:52:22.254 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.259 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.322 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.326 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.375 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.375 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.396 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.397 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.401 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.405 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.447 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsfb804b4a_8416_4842_8298_a884ed377980"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:52:22 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/58f3fe0d-fc50-46e5-a795-efacb8a12f3d {"id":"58f3fe0d-fc50-46e5-a795-efacb8a12f3d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812739} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd92a1dc 58f3fe0d-fc50-46e5-a795-efacb8a12f3d /tidb/cdc/default/default/upstream/7365064557020183962 {"id":7365064557020183962,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/58f3fe0d-fc50-46e5-a795-efacb8a12f3d {"id":"58f3fe0d-fc50-46e5-a795-efacb8a12f3d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812739} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd92a1dc 58f3fe0d-fc50-46e5-a795-efacb8a12f3d /tidb/cdc/default/default/upstream/7365064557020183962 {"id":7365064557020183962,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: 
/tidb/cdc/default/__cdc_meta__/capture/58f3fe0d-fc50-46e5-a795-efacb8a12f3d {"id":"58f3fe0d-fc50-46e5-a795-efacb8a12f3d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812739} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cd92a1dc 58f3fe0d-fc50-46e5-a795-efacb8a12f3d /tidb/cdc/default/default/upstream/7365064557020183962 {"id":7365064557020183962,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! ID: fa3b7a3d-c148-4de3-9475-1c6c307093c9 Info: {"upstream_id":7365064557020183962,"namespace":"default","id":"fa3b7a3d-c148-4de3-9475-1c6c307093c9","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-25759?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:52:22.388409914+08:00","start_ts":449527870281744387,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527870281744387,"checkpoint_ts":449527870281744387,"checkpoint_time":"2024-05-04 16:52:17.586"} [Sat May 4 16:52:22 CST 2024] <<<<<< START kafka consumer in common_1 case >>>>>> =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resourcecontrol/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
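The "Create changefeed successfully!" blocks above show the cdc CLI creating a changefeed against the local PD with an open-protocol Kafka sink URI; a Kafka consumer is then started to replay the topic into the downstream TiDB. A minimal sketch of the creation step, reusing the topic, parameters, and start-ts that appear in the common_1 trace (whether --start-ts is passed explicitly varies by test case):

#!/usr/bin/env bash
# Create a changefeed that writes open-protocol messages to a local Kafka topic.
SINK_URI="kafka://127.0.0.1:9092/ticdc-common-1-test-25759?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"

cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --start-ts=449527870281744387 \
    --sink-uri="$SINK_URI"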
[2024/05/04 16:52:22.552 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs205e8d1f_0080_4888_a769_279a609af77f"] [2024/05/04 16:52:22.565 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:22.567 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:22.724 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:22.737 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:22.819 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:22.831 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:22.845 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:22.848 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:22.852 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:22.860 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:22.927 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:22.928 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:22.977 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:23.020 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:23.020 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:23.024 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:23.114 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs0e5c28a2_1b1d_4747_8183_5429253c61f1"] [2024/05/04 16:52:23.141 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:23.227 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] + set +x [Sat May 4 16:52:23 CST 2024] <<<<<< START kafka consumer in batch_add_table case >>>>>> [2024/05/04 16:52:23.344 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:23.436 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:23.444 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:23.446 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:23.515 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:23.525 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:23.552 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:23.561 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [Sat May 4 16:52:23 CST 2024] <<<<<< START cdc server in mq_sink_dispatcher case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.1393613938.out server --log-file /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc.log --log-level info --data-dir /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.finish_mark not exists for 10-th check, retry later [2024/05/04 16:52:23.632 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:23.646 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:23.655 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:23.711 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:23.737 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:23.740 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:23.752 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] start tidb cluster in /tmp/tidb_cdc_test/resourcecontrol Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... check diff failed 6-th time, retry later table batch_add_table.finish_mark not exists for 1-th check, retry later [2024/05/04 16:52:23.834 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:23.927 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:24.032 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:24.038 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:24.039 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:22.649 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/05/04 16:52:22.649 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365064123084548062] [2024/05/04 16:52:22.649 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 16:52:22.649 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/05/04 16:52:22.650 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/04 16:52:22.650 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 16:52:22.651 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=] [2024/05/04 16:52:22.651 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365064123084548062] [2024/05/04 16:52:22.651 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 16:52:22.651 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379] [2024/05/04 16:52:22.652 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global] [2024/05/04 16:52:22.652 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE] [2024/05/04 16:52:22.652 +08:00] [INFO] 
[tikv_driver.go:197] ["using API V1."] [2024/05/04 16:52:22.653 +08:00] [INFO] [main.go:180] ["genLock started"] [2024/05/04 16:52:22.655 +08:00] [INFO] [store_cache.go:477] ["change store resolve state"] [store=2] [addr=127.0.0.1:20160] [from=unresolved] [to=resolved] [liveness-state=reachable] [2024/05/04 16:52:24.135 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:24.145 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:24.221 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:24.227 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:24.247 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:24.265 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:24.315 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:24.317 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] start tidb cluster in /tmp/tidb_cdc_test/force_replicate_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... [2024/05/04 16:52:24.335 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:24.345 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:24.354 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:24.425 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:24.457 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:24.620 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:24.622 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:24.624 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:24.724 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:24.735 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:24.822 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:24.832 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:24.856 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:24.922 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:24.934 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:24.935 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:24.946 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:24.960 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:25.017 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:25.033 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:25.117 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:25.234 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:25.238 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:25.241 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:25.420 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:25.430 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] 
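The mq_sink_dispatcher case has just started its cdc server and entered the readiness loop traced around this point: it polls the /debug/info endpoint until the response contains 'etcd info', sleeping 3 seconds between attempts and giving up after 50. A condensed sketch of that loop, assuming the server listens on 127.0.0.1:8300 with the ticdc:ticdc_secret credentials shown in the trace:
# condensed sketch of the readiness loop; variable names are illustrative
for i in $(seq 0 50); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
  echo "$res" | grep -q 'failed to get info:' && { echo "cdc server reported an error"; exit 1; }
  echo "$res" | grep -q 'etcd info' && break   # capture registered in etcd, server is ready
  [ "$i" -eq 50 ] && { echo "cdc server did not become ready"; exit 1; }
  sleep 3
done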
[2024/05/04 16:52:25.449 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:25.532 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:25.548 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] table test.finish_mark not exists for 11-th check, retry later [2024/05/04 16:52:25.633 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:25.652 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:25.653 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:25.718 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:25.728 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:25.729 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:25.743 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:25.820 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:25.840 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] check diff successfully table batch_add_table.finish_mark not exists for 2-th check, retry later [2024/05/04 16:52:25.921 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:25.942 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:26.030 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:26.040 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:26.047 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:26.143 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:26.212 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:26.244 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:26.437 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:26.437 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:26.438 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:26.445 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:26.455 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:26.525 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:26.556 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:26.558 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:26.628 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:26.712 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:26.719 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:26.721 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:26.733 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:26.754 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:26.824 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:26.845 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:52:26 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 {"id":"76d86da0-35d3-4831-8eaa-72ffdaf7e6f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812744} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdafddcd 76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 /tidb/cdc/default/default/upstream/7365064596841792982 {"id":7365064596841792982,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 {"id":"76d86da0-35d3-4831-8eaa-72ffdaf7e6f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812744} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdafddcd 76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 /tidb/cdc/default/default/upstream/7365064596841792982 {"id":7365064596841792982,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 {"id":"76d86da0-35d3-4831-8eaa-72ffdaf7e6f0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812744} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdafddcd 76d86da0-35d3-4831-8eaa-72ffdaf7e6f0 /tidb/cdc/default/default/upstream/7365064596841792982 {"id":7365064596841792982,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x [2024/05/04 16:52:27.040 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:27.117 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:27.118 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:27.121 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14014.out cli tso query --pd=http://127.0.0.1:2379 [2024/05/04 16:52:27.146 +08:00] [INFO] [main.go:178] ["72 
insert success: 600"] [2024/05/04 16:52:27.157 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:27.233 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:27.245 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:27.315 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:27.346 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:27.349 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:27.350 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:27.355 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:27.413 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:27.417 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:27.440 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] table test.finish_mark not exists for 12-th check, retry later [2024/05/04 16:52:27.520 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:27.630 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:27.632 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:27.637 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:27.724 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:27.737 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:27.816 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:27.913 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:27.929 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 16:52:28.029 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:28.046 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:28.050 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:28.114 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:28.122 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:28.135 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:28.148 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:28.163 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:28.252 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:28.255 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:28.255 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] check diff failed 1-th time, retry later table batch_add_table.finish_mark exists check diff successfully [2024/05/04 16:52:28.341 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:28.349 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:28.420 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] wait process cdc.test exit for 1-th time... Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release [2024/05/04 16:52:28.542 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:28.545 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:28.713 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:28.727 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:28.733 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:28.822 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:28.835 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:28.838 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:28.846 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:28.847 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.016 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:29.019 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.028 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] + set +x + tso='449527872845774851 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527872845774851 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14048.out cli changefeed create --start-ts=449527872845774851 '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/changefeed.toml [2024/05/04 16:52:29.114 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:29.120 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:29.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.236 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.248 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] wait process cdc.test exit for 2-th time... Create changefeed successfully! 
ID: test Info: {"upstream_id":7365064596841792982,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T16:52:29.321187527+08:00","start_ts":449527872845774851,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["verify.t"],"partition":"index-value"},{"matcher":["dispatcher.index"],"partition":"index-value","index":"idx_a"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527872845774851,"checkpoint_ts":449527872845774851,"checkpoint_time":"2024-05-04 16:52:27.367"} PASS table test.finish_mark not exists for 13-th check, retry later [2024/05/04 16:52:29.326 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:29.337 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:29.344 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:29.429 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:29.448 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:29.511 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:29.513 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:29.530 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] coverage: 2.5% of statements in github.com/pingcap/tiflow/... [2024/05/04 16:52:29.631 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:29.725 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:29.727 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] wait process cdc.test exit for 3-th time... 
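The changefeed "test" created above carries a dispatcher rule that routes events for dispatcher.index by index-value using index idx_a; the test expects it to transition to failed once the dispatcher cannot find that index (the ErrDispatcherFailed message that appears further down). The check_changefeed_state calls below poll for that transition; a condensed sketch of one poll, assuming PD at http://127.0.0.1:2379 and changefeed ID test:
# condensed sketch of one check_changefeed_state poll (the harness retries on failure)
info=$(cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s)
state=$(echo "$info" | jq -r .state)
message=$(echo "$info" | jq -r .error.message)
if [ "$state" != "failed" ]; then
  echo "changefeed state $state does not equal to failed"
  exit 1
fi
echo "$message" | grep -q 'ErrDispatcherFailed' || exit 1   # error message must mention the expected failure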
[2024/05/04 16:52:29.839 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:29.866 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:29.872 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.913 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:29.917 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:29.941 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:29.958 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:29.963 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:29.984 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:30.026 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:52:30 CST 2024] <<<<<< run test case batch_add_table success! >>>>>> [2024/05/04 16:52:30.112 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:30.113 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:30.329 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:30.435 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:30.440 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:30.469 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:30.472 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:30.531 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:30.544 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
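The "Verifying Upstream TiDB is started..." step that several cases reach here simply retries a MySQL connection against the freshly started TiDB until it answers; the ERROR 2003 lines that follow are those failed attempts, and the VARIABLE_NAME / VARIABLE_VALUE dump further down is the successful readiness query. A minimal sketch of that wait, with the port (4000) and the mysql.tidb readiness query being assumptions inferred from the output rather than values shown in this excerpt:
# sketch only: port 4000 and the mysql.tidb query are assumptions, not taken from this log
while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb' >/dev/null 2>&1; do
  echo "TiDB not ready yet, retrying..."   # corresponds to the ERROR 2003 attempts in the log
  sleep 1
done
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'   # prints the bootstrap/GC variables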
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table common_1.v1 exists table common_1.recover_and_insert not exists for 1-th check, retry later [2024/05/04 16:52:30.553 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:30.569 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:30.569 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:30.589 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:30.595 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:30.681 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] check diff failed 2-th time, retry later [2024/05/04 16:52:30.834 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:30.914 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:30.941 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:31.025 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:31.034 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:31.051 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:31.064 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:31.122 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:31.126 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:31.249 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s [2024/05/04 16:52:31.362 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:31.369 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:31.388 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:31.394 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:31.481 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] + info='{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null }' + echo '{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null }' { "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null } ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527872845774851, '"checkpoint_time":' '"2024-05-04' '16:52:27.367",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527872845774851, '"checkpoint_time":' '"2024-05-04' '16:52:27.367",' '"error":' null '}' ++ jq -r .error.message + message=null + [[ ! 
null =~ null ]] run task successfully [2024/05/04 16:52:31.621 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb5df9d2c_4c6e_44a3_b92a_e791f98b5822"] [2024/05/04 16:52:31.627 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs99dd42c0_d3f2_4b4f_b796_2741451369cb"] [2024/05/04 16:52:31.685 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsf5f08cbb_640f_4699_8f34_085be3714f2b"] [2024/05/04 16:52:31.747 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs826b2438_f585_4060_afdd_67e0073e22a8"] [2024/05/04 16:52:31.786 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] table test.finish_mark not exists for 14-th check, retry later check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s [2024/05/04 16:52:31.834 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:31.944 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:31.952 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:32.016 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:32.020 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] + info='{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null }' + echo '{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null }' { "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527872845774851, "checkpoint_time": "2024-05-04 16:52:27.367", "error": null } ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527872845774851, '"checkpoint_time":' '"2024-05-04' '16:52:27.367",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! 
normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 1-th time, retry later [2024/05/04 16:52:32.132 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:32.133 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:32.215 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs91f658b5_6392_4f20_a246_06d93ae5d08d"] [2024/05/04 16:52:32.315 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:32.329 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:32.537 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:32.551 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:32.556 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:32.556 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1' ____________________________________ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ check data failed 1-th time, retry later check data successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:52:29 CST 2024] <<<<<< run test case ddl_puller_lag success! >>>>>> table common_1.recover_and_insert not exists for 2-th check, retry later check diff failed 3-th time, retry later [2024/05/04 16:52:32.654 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs94e87a11_88f6_441f_b7c5_d2d745138f43"] [2024/05/04 16:52:32.654 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:32.657 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:32.720 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:32.722 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:32.743 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd2440951_ffbe_494f_8acf_73dadf73b41d"] [2024/05/04 16:52:32.758 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:32.820 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:33.046 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:33.062 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:33.062 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:33.066 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:33.228 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:33.231 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:33.233 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:33.243 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:33.253 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:33.255 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:33.335 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:33.336 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:33.346 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:33.355 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] table test.finish_mark not exists for 15-th 
check, retry later [2024/05/04 16:52:33.614 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:33.619 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:33.619 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:33.619 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:33.676 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:33.678 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:33.682 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:33.716 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:33.752 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:33.754 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:33.776 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:33.780 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:33.787 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:33.815 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:32.660 +08:00] [INFO] [main.go:196] ["genLock done"] [2024/05/04 16:52:32.660 +08:00] [INFO] [pd_service_discovery.go:550] ["[pd] exit member loop due to context canceled"] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_dispatcher.go:214] ["exit tso requests cancel loop"] [2024/05/04 16:52:32.660 +08:00] [INFO] [resource_manager_client.go:295] ["[resource manager] exit resource token dispatcher"] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_client.go:140] ["closing tso client"] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_dispatcher.go:268] ["exit tso dispatcher loop"] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_dispatcher.go:455] ["[tso] stop fetching the pending tso requests due to context canceled"] [dc-location=global] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_dispatcher.go:380] ["[tso] exit tso dispatcher"] [dc-location=global] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_client.go:145] ["close tso client"] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0] [2024/05/04 16:52:32.660 +08:00] [INFO] [tso_client.go:155] ["tso client is closed"] [2024/05/04 16:52:32.660 +08:00] [INFO] [pd_service_discovery.go:637] ["[pd] close pd service discovery client"] [2024/05/04 16:52:34.222 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:34.222 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:34.225 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:34.231 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:34.330 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa8180927_1f11_4f22_965e_b345467d2819"] [2024/05/04 16:52:34.347 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:34.351 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:34.361 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test 
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_source/run.sh using Sink-Type: kafka... <<================= ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table common_1.recover_and_insert not exists for 3-th check, retry later [2024/05/04 16:52:34.419 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:34.453 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:34.455 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:34.458 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:34.461 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:34.465 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:34.465 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] check diff failed 4-th time, retry later check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null }' + echo '{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null }' { "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null } ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527874025422886, '"checkpoint_time":' '"2024-05-04' '16:52:31.867",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! 
normal == \f\a\i\l\e\d ]] + echo 'changefeed state normal does not equal to failed' changefeed state normal does not equal to failed + exit 1 run task failed 2-th time, retry later [2024/05/04 16:52:34.750 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:34.818 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:34.821 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:34.824 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:34.931 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:34.939 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:34.947 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:34.953 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:35.041 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:35.050 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:35.141 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:35.145 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:35.147 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:35.147 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:35.147 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:35.215 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:35.443 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:35.448 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:35.448 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:35.450 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:35.551 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:35.620 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:35.620 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:35.638 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] table test.finish_mark not exists for 16-th check, retry later [2024/05/04 16:52:35.718 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:35.722 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:35.817 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:35.817 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:35.820 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:35.824 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:35.824 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:35.841 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b38748c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:11130, start at 2024-05-04 16:52:35.281752925 +0800 CST m=+5.379558866 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.289 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.286 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.286 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table common_1.recover_and_insert exists table common_1.finish_mark not exists for 1-th check, retry later [2024/05/04 16:52:36.239 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:36.242 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:36.245 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:36.248 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:36.340 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:36.347 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:36.354 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:36.419 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:36.440 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:36.444 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:36.464 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:36.464 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:36.512 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:36.512 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:36.513 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:36.530 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:36.922 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:36.929 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:36.930 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:36.947 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] check diff failed 5-th time, retry later [2024/05/04 16:52:37.018 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:37.021 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:37.030 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:37.051 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:37.075 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:37.113 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:37.130 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:37.131 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:37.135 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:37.142 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:37.143 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:37.147 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] table test.finish_mark exists check diff successfully [2024/05/04 16:52:37.545 
+08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:37.617 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:37.629 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:37.643 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:37.728 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:37.735 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:37.747 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:37.764 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:37.828 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:37.833 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:37.850 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:37.852 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:37.854 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:37.858 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:37.860 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:37.866 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] wait process cdc.test exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b38748c0019 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:11130, start at 2024-05-04 16:52:35.281752925 +0800 CST m=+5.379558866 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.289 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.286 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.286 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3876dc0002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tvq5x-hqf9m, pid:11217, start at 2024-05-04 16:52:35.385234663 +0800 CST m=+5.421817555 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.392 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.383 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.383 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table test.t2 not exists for 1-th check, retry later table common_1.finish_mark not exists for 2-th check, retry later [2024/05/04 16:52:38.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:38.139 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:38.228 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:38.235 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:38.314 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:38.330 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:38.349 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:38.411 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:38.449 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:38.519 +08:00] [INFO] [main.go:178] ["72 
insert success: 1100"] [2024/05/04 16:52:38.532 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:38.536 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:38.544 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] wait process cdc.test exit for 2-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b38771c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:4441, start at 2024-05-04 16:52:35.432205138 +0800 CST m=+5.240976974 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.441 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.399 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.399 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b38771c0013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:4441, start at 2024-05-04 16:52:35.432205138 +0800 CST m=+5.240976974 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.441 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.399 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.399 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b38789c0007 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-s880q-kprjl, pid:4516, start at 2024-05-04 16:52:35.501607637 +0800 CST m=+5.238211961 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:35.508 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:35.495 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:35.495 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } [2024/05/04 16:52:38.616 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:38.616 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:38.617 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:38.820 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:38.838 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=failed + error_msg=ErrDispatcherFailed + tls_dir=ErrDispatcherFailed + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query 
--pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": { "time": "2024-05-04T16:52:35.66401997+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' + echo '{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": { "time": "2024-05-04T16:52:35.66401997+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } }' { "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "failed", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": { "time": "2024-05-04T16:52:35.66401997+08:00", "addr": "127.0.0.1:8300", "code": "CDC:ErrDispatcherFailed", "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a" } } ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449527874025422886, '"checkpoint_time":' '"2024-05-04' '16:52:31.867",' '"error":' '{' '"time":' '"2024-05-04T16:52:35.66401997+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' ++ jq -r .state + state=failed + [[ ! failed == \f\a\i\l\e\d ]] ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449527874025422886, '"checkpoint_time":' '"2024-05-04' '16:52:31.867",' '"error":' '{' '"time":' '"2024-05-04T16:52:35.66401997+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}' ++ jq -r .error.message + message='[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a' + [[ ! 
[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]] run task successfully + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14286.out cli changefeed update -c test '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/new_changefeed.toml --no-confirm check diff successfully [2024/05/04 16:52:38.946 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:38.952 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:39.021 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:39.036 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:39.047 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:39.058 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] Diff of changefeed config: {Type:update Path:[Config SyncPointInterval] From: To:0xc0039ac928} {Type:update Path:[Config SyncPointRetention] From: To:0xc0039ac938} {Type:update Path:[Config Sink DispatchRules 0 Matcher 0] From:verify.t To:dispatcher.index} {Type:delete Path:[Config Sink DispatchRules 1 Matcher 0] From:dispatcher.index To:} {Type:delete Path:[Config Sink DispatchRules 1 PartitionRule] From:index-value To:} {Type:delete Path:[Config Sink DispatchRules 1 IndexName] From:idx_a To:} {Type:update Path:[Config Consistent] From: To:0xc000f2ba40} [2024/05/04 16:52:39.133 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:39.152 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:39.159 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:39.160 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:39.223 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:39.244 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:39.246 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:39.247 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:39.362 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] Update changefeed config successfully! 
ID: test Info: {"upstream_id":7365064596841792982,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-04T16:52:29.321187527+08:00","start_ts":449527872845774851,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","dispatchers":[{"matcher":["dispatcher.index"],"partition":"index-value"}],"encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"failed","error":{"addr":"127.0.0.1:8300","code":"CDC:ErrDispatcherFailed","message":"[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"},"creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":0,"checkpoint_ts":449527874025422886,"checkpoint_time":"2024-05-04 16:52:31.867"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
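The xtrace further up is the changefeed-state assertion at work: it queries the changefeed through the CLI, then pulls .state and .error.message out with jq and compares them against what the test expects. A condensed sketch of that pattern, reusing the endpoint and changefeed id from the trace (illustrative, not the helper's verbatim source):

# Sketch: assert a changefeed reached the expected state and error message.
endpoints=http://127.0.0.1:2379
changefeed_id=test
expected_state=failed
error_msg=ErrDispatcherFailed
info=$(cdc cli changefeed query --pd=${endpoints} -c ${changefeed_id} -s)
state=$(echo "${info}" | jq -r .state)
message=$(echo "${info}" | jq -r .error.message)
[[ "${state}" == "${expected_state}" ]] || { echo "unexpected state: ${state}"; exit 1; }
[[ "${message}" =~ ${error_msg} ]] || { echo "unexpected error: ${message}"; exit 1; }
echo "run task successfully"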
[2024/05/04 16:52:39.434 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:39.628 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:39.640 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:39.654 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:39.717 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:39.742 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:39.754 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:39.765 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:39.766 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:39.778 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:39.824 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:39.824 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:39.825 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:39.887 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:39.922 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:40.022 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:40.053 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] table test.t2 not exists for 2-th check, retry later [2024/05/04 16:52:40.137 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:40.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:40.312 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:40.342 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:40.346 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:40.349 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] wait process cdc.test exit for 3-th time... 
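The 'changefeed update ... --config=.../new_changefeed.toml --no-confirm' call above rewrites the dispatch rules down to a single rule that routes dispatcher.index by index value, matching the dispatchers field in the updated config printed just above. A minimal sketch of what such a config plus update could look like (the TOML content is an assumption inferred from the printed diff and resulting config, not the file's actual text):

# Sketch: point the failed changefeed at a single index-value dispatch rule, then update it.
cat > new_changefeed.toml <<'EOF'
[sink]
dispatchers = [
    { matcher = ["dispatcher.index"], partition = "index-value" },
]
EOF
cdc cli changefeed update -c test \
    --sink-uri="kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true" \
    --config=new_changefeed.toml --no-confirm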
[2024/05/04 16:52:40.414 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:40.445 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:40.447 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:40.447 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:40.556 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:40.559 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:40.615 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:40.632 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12566.out cli tso query --pd=http://127.0.0.1:2379 [Sat May 4 16:52:40 CST 2024] <<<<<< START cdc server in force_replicate_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.force_replicate_table.58905892.out server --log-file /tmp/tidb_cdc_test/force_replicate_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/force_replicate_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14321.out cli changefeed resume -c test cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:52:40 CST 2024] <<<<<< run test case many_pk_or_uk success! 
>>>>>> table common_1.finish_mark not exists for 3-th check, retry later [2024/05/04 16:52:40.654 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:40.681 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:40.685 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:40.689 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:40.823 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:40.924 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:40.927 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:40.927 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:41.076 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:41.082 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:41.149 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:41.162 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:41.163 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:41.163 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:41.230 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:41.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:41.332 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs42d0c4ee_674d_4aca_a7c3_5e2d3ac7d97e"] [2024/05/04 16:52:41.338 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs4b85f2f2_9121_4c84_bafa_1f7701b7516d"] check diff failed 1-th time, retry later [2024/05/04 16:52:41.427 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:41.430 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:41.538 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:41.559 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:41.559 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:41.631 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] PASS coverage: 2.1% of statements in github.com/pingcap/tiflow/... [2024/05/04 16:52:41.687 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:41.691 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:41.698 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:41.746 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:41.750 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:41.753 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:41.848 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs4ac58c0f_ebf9_402d_8f5d_2215f80f1380"] [2024/05/04 16:52:42.146 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:42.148 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:42.150 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_rocks/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... 
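Lines such as 'table common_1.finish_mark not exists for 3-th check, retry later' come from a polling helper that keeps asking the downstream whether the marker table is visible yet before the data comparison starts. Roughly (host, port and retry budget here are assumptions, not taken from this log):

# Sketch: wait for a table to appear downstream, retrying with a short sleep.
table=common_1.finish_mark
for i in $(seq 1 60); do
    if mysql -h127.0.0.1 -P3306 -uroot -e "DESC ${table}" >/dev/null 2>&1; then
        echo "table ${table} exists"
        break
    fi
    echo "table ${table} not exists for ${i}-th check, retry later"
    sleep 2
done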
[2024/05/04 16:52:42.213 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:42.215 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:42.219 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:42.258 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:42.264 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:42.329 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs79eca58f_ecc3_4623_bad3_ba2f487eb942"] =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_only_block_related_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + set +x + tso='449527876310794242 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527876310794242 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x [Sat May 4 16:52:42 CST 2024] <<<<<< START cdc server in resourcecontrol case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.1259912601.out server --log-file /tmp/tidb_cdc_test/resourcecontrol/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resourcecontrol/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
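The start-ts later passed to 'changefeed create' for the resourcecontrol case comes from the 'cli tso query' above; because the instrumented CLI also prints a coverage summary, the trace keeps only the first whitespace-separated field of the first line. Schematically:

# Sketch: fetch a TSO from PD and strip the trailing coverage line.
start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' 'NR==1 {print $1}')
echo "${start_ts}"   # e.g. 449527876310794242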
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 [2024/05/04 16:52:42.445 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs982a6eb8_8d63_4036_9836_8041d9c1c0c3"] [2024/05/04 16:52:42.521 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:42.526 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:42.529 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:42.653 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:42.657 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] table test.t2 not exists for 3-th check, retry later table common_1.finish_mark not exists for 4-th check, retry later [2024/05/04 16:52:42.714 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:42.759 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:42.759 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:42.842 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:42.845 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] + set +x check_changefeed_state http://127.0.0.1:2379 test normal null + endpoints=http://127.0.0.1:2379 + changefeed_id=test + expected_state=normal + error_msg=null + tls_dir=null + [[ http://127.0.0.1:2379 =~ https ]] ++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s + info='{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null }' + echo '{ "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null }' { "upstream_id": 7365064596841792982, "namespace": "default", "id": "test", "state": "normal", "checkpoint_tso": 449527874025422886, "checkpoint_time": "2024-05-04 16:52:31.867", "error": null } ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527874025422886, '"checkpoint_time":' '"2024-05-04' '16:52:31.867",' '"error":' null '}' ++ jq -r .state + state=normal + [[ ! normal == \n\o\r\m\a\l ]] ++ echo '{' '"upstream_id":' 7365064596841792982, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449527874025422886, '"checkpoint_time":' '"2024-05-04' '16:52:31.867",' '"error":' null '}' ++ jq -r .error.message [2024/05/04 16:52:42.937 +08:00] [INFO] [main.go:178] ["72 insert success: 100"] [2024/05/04 16:52:42.938 +08:00] [INFO] [main.go:178] ["73 insert success: 100"] [2024/05/04 16:52:42.956 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:42.959 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:42.967 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:43.058 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:43.117 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:43.136 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:43.171 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] + message=null + [[ ! 
null =~ null ]] run task successfully [2024/05/04 16:52:43.226 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:43.270 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:43.272 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] [2024/05/04 16:52:43.380 +08:00] [INFO] [main.go:178] ["72 insert success: 200"] [2024/05/04 16:52:43.384 +08:00] [INFO] [main.go:178] ["73 insert success: 200"] check diff failed 2-th time, retry later table test.finish_mark not exists for 1-th check, retry later [2024/05/04 16:52:43.432 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:43.435 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:43.435 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:43.472 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:43.492 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"] [2024/05/04 16:52:43.519 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"] [2024/05/04 16:52:43.551 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:43.571 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:43.641 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:43.647 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:43.777 +08:00] [INFO] [main.go:178] ["72 insert success: 300"] [2024/05/04 16:52:43.814 +08:00] [INFO] [main.go:178] ["73 insert success: 300"] [2024/05/04 16:52:43.842 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:43.844 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:43.853 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:43.887 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:43.912 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"] [2024/05/04 16:52:43.934 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:52:43 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4d143be-debe-4578-a687-f8c93d1dd820 {"id":"b4d143be-debe-4578-a687-f8c93d1dd820","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812760} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf19ece b4d143be-debe-4578-a687-f8c93d1dd820 /tidb/cdc/default/default/upstream/7365064670574747678 {"id":7365064670574747678,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4d143be-debe-4578-a687-f8c93d1dd820 {"id":"b4d143be-debe-4578-a687-f8c93d1dd820","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812760} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf19ece b4d143be-debe-4578-a687-f8c93d1dd820 /tidb/cdc/default/default/upstream/7365064670574747678 {"id":7365064670574747678,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b4d143be-debe-4578-a687-f8c93d1dd820 {"id":"b4d143be-debe-4578-a687-f8c93d1dd820","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812760} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf19ece b4d143be-debe-4578-a687-f8c93d1dd820 /tidb/cdc/default/default/upstream/7365064670574747678 {"id":7365064670574747678,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x Create changefeed successfully! 
ID: fadd8e26-9a32-4902-9fb1-271c296357d0 Info: {"upstream_id":7365064670574747678,"namespace":"default","id":"fadd8e26-9a32-4902-9fb1-271c296357d0","sink_uri":"kafka://127.0.0.1:9092/ticdc-force_replicate_table-test-24427?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:52:43.937887474+08:00","start_ts":449527876314464257,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":true,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527876314464257,"checkpoint_ts":449527876314464257,"checkpoint_time":"2024-05-04 16:52:40.599"} [Sat May 4 16:52:43 CST 2024] <<<<<< START kafka consumer in force_replicate_table case >>>>>> consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/conf/changefeed.toml [2024/05/04 16:52:43.952 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:43.971 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:43.997 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:44.001 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] [2024/05/04 16:52:44.135 +08:00] [INFO] [main.go:178] ["72 insert success: 400"] [2024/05/04 16:52:44.156 +08:00] [INFO] [main.go:178] ["73 insert success: 400"] The 1 times to try to start tidb cluster... 
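The curl exchanges above, first 'Connection refused', then a 200 response listing the capture, owner and upstream records, are the standard wait-until-ready probe: the harness hits the new cdc server's /debug/info endpoint with basic auth until the body contains 'etcd info', retrying up to 50 times with a 3 second sleep. A minimal sketch of that loop:

# Sketch: poll the cdc server's debug endpoint until it reports etcd info.
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
    if echo "${res}" | grep -q 'etcd info'; then
        break
    fi
    if [ "${i}" -eq 50 ]; then
        echo 'cdc server failed to start in time'
        exit 1
    fi
    sleep 3
done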
[2024/05/04 16:52:44.244 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:44.247 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:44.251 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:44.364 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:44.368 +08:00] [INFO] [main.go:178] ["73 insert success: 1900"] [2024/05/04 16:52:44.421 +08:00] [INFO] [main.go:178] ["72 insert success: 1900"] [2024/05/04 16:52:44.436 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] table test.t2 not exists for 4-th check, retry later table common_1.finish_mark exists check diff successfully [2024/05/04 16:52:44.453 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:44.456 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:44.458 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:44.542 +08:00] [INFO] [main.go:178] ["72 insert success: 500"] [2024/05/04 16:52:44.557 +08:00] [INFO] [main.go:178] ["73 insert success: 500"] [2024/05/04 16:52:44.591 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:44.595 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:44.602 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:44.680 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] start tidb cluster in /tmp/tidb_cdc_test/multi_rocks Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... wait process cdc.test exit for 1-th time... [2024/05/04 16:52:44.753 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:44.819 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:44.825 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:44.828 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] [2024/05/04 16:52:44.943 +08:00] [INFO] [main.go:178] ["72 insert success: 600"] Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... start tidb cluster in /tmp/tidb_cdc_test/ddl_only_block_related_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
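The force_replicate_table changefeed created above runs with "force_replicate": true, so tables without a usable unique key (the force_replicate_table.t* tables checked further down) are still replicated, and the kafka consumer is started with the same replica config. A sketch of the assumed shape of such a setup (the real conf/changefeed.toml is not printed in this log, so its content here is an assumption):

# Sketch: create a changefeed that forces replication of tables lacking a valid unique key (assumed config content).
cat > changefeed.toml <<'EOF'
force-replicate = true
EOF
SINK_URI="kafka://127.0.0.1:9092/ticdc-force_replicate_table-test-24427?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
cdc cli changefeed create --sink-uri="${SINK_URI}" --config=changefeed.toml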
[2024/05/04 16:52:44.960 +08:00] [INFO] [main.go:178] ["73 insert success: 600"] [2024/05/04 16:52:45.021 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:45.021 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:45.027 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:45.064 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:45.114 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:45.157 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:45.161 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:45.164 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] wait process cdc.test exit for 2-th time... table test.finish_mark not exists for 2-th check, retry later [2024/05/04 16:52:45.244 +08:00] [INFO] [main.go:178] ["72 insert success: 700"] [2024/05/04 16:52:45.259 +08:00] [INFO] [main.go:178] ["73 insert success: 700"] [2024/05/04 16:52:45.348 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:45.354 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:45.356 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:45.444 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:45.540 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:45.581 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:45.614 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:45.614 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] [2024/05/04 16:52:45.670 +08:00] [INFO] [main.go:178] ["72 insert success: 800"] [2024/05/04 16:52:45.686 +08:00] [INFO] [main.go:178] ["73 insert success: 800"] + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:52:45 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca {"id":"89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812762} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf065d5 89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca /tidb/cdc/default/default/upstream/7365064660368717569 {"id":7365064660368717569,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca {"id":"89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812762} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf065d5 89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca /tidb/cdc/default/default/upstream/7365064660368717569 {"id":7365064660368717569,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca {"id":"89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812762} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42cdf065d5 89bc933c-6b6c-4bb8-be46-8d0d7e0f75ca /tidb/cdc/default/default/upstream/7365064660368717569 {"id":7365064660368717569,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12663.out cli changefeed create --start-ts=449527876310794242 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-32672?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! 
ID: 6c14bd00-e038-48a5-a066-df58a7141d81 Info: {"upstream_id":7365064660368717569,"namespace":"default","id":"6c14bd00-e038-48a5-a066-df58a7141d81","sink_uri":"kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-32672?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:52:45.68322585+08:00","start_ts":449527876310794242,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527876310794242,"checkpoint_ts":449527876310794242,"checkpoint_time":"2024-05-04 16:52:40.585"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... check diff failed 3-th time, retry later [2024/05/04 16:52:45.724 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:45.729 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:45.737 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:45.786 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:45.836 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:45.918 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:45.924 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:45.926 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] wait process cdc.test exit for 3-th time... 
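'check diff failed 3-th time, retry later' above is the data-consistency step: the harness re-runs a full upstream/downstream comparison until it reports no difference or the retry budget runs out. Schematically (the binary name and config path are assumptions about how these tests are typically wired, not taken from this log):

# Sketch: retry the upstream/downstream data comparison until it passes.
for i in $(seq 1 30); do
    if sync_diff_inspector --config=./conf/diff_config.toml >/dev/null 2>&1; then
        echo "check diff successfully"
        break
    fi
    echo "check diff failed ${i}-th time, retry later"
    sleep 2
done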
[2024/05/04 16:52:45.976 +08:00] [INFO] [main.go:178] ["72 insert success: 900"] [2024/05/04 16:52:45.985 +08:00] [INFO] [main.go:178] ["73 insert success: 900"] [2024/05/04 16:52:46.023 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:46.031 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:46.043 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:46.100 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:46.130 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] table test.t2 not exists for 5-th check, retry later Verifying downstream PD is started... [2024/05/04 16:52:46.225 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:46.235 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:46.238 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:46.284 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"] [2024/05/04 16:52:46.322 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"] [2024/05/04 16:52:46.357 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:46.368 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:46.430 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:46.524 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:52:46 CST 2024] <<<<<< run test case common_1 success! >>>>>> [2024/05/04 16:52:46.594 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] [2024/05/04 16:52:46.606 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:46.609 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:46.636 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"] [2024/05/04 16:52:46.643 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"] start tidb cluster in /tmp/tidb_cdc_test/multi_source Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... Verifying downstream PD is started... 
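Each case tears down by stopping cdc.test and polling until no such process is left, which is what the 'wait process cdc.test exit for N-th time...', 'cdc.test: no process found' and 'process cdc.test already exit' lines reflect. A rough sketch of that shutdown loop (exact signals and retry counts are assumptions):

# Sketch: stop cdc.test and wait for the process to disappear.
killall cdc.test 2>/dev/null || true
for i in $(seq 1 10); do
    if ! pgrep -x cdc.test >/dev/null; then
        echo "process cdc.test already exit"
        break
    fi
    echo "wait process cdc.test exit for ${i}-th time..."
    sleep 1
done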
[2024/05/04 16:52:46.835 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:46.918 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] [2024/05/04 16:52:46.937 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:46.939 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:46.965 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"] [2024/05/04 16:52:46.968 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"] + set +x [Sat May 4 16:52:47 CST 2024] <<<<<< START kafka consumer in resourcecontrol case >>>>>> [2024/05/04 16:52:47.136 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:47.222 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:47.253 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:47.256 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:47.275 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"] [2024/05/04 16:52:47.275 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"] [2024/05/04 16:52:47.437 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:47.514 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] [2024/05/04 16:52:47.552 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:47.552 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:47.557 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"] table resourcecontrol.finish_mark not exists for 1-th check, retry later [2024/05/04 16:52:47.570 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"] [2024/05/04 16:52:47.710 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"] [2024/05/04 16:52:47.796 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 4-th time, retry later table test.finish_mark exists check diff successfully [2024/05/04 16:52:47.840 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"] [2024/05/04 16:52:47.841 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] [2024/05/04 16:52:47.846 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"] [2024/05/04 16:52:47.858 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"] Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release cdc.test: no process found wait process cdc.test exit for 1-th time... process cdc.test already exit [Sat May 4 16:52:48 CST 2024] <<<<<< run test case mq_sink_dispatcher success! >>>>>> table test.t2 not exists for 6-th check, retry later [2024/05/04 16:52:48.551 +08:00] [INFO] [main.go:812] ["testMultiDDLs take %v46.938039519s"] [2024/05/04 16:52:49.061 +08:00] [INFO] [main.go:74] ["DefaultValue integration tests take 47.448260799s"] table mark.finish_mark_1 not exists for 1-th check, retry later Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used table resourcecontrol.finish_mark exists check diff failed 5-th time, retry later Starting Upstream TiDB... check diff successfully Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 7-th check, retry later wait process cdc.test exit for 1-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... 
Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:52:51 CST 2024] <<<<<< run test case resourcecontrol success! >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_1 exists table mark.finish_mark_2 not exists for 1-th check, retry later table force_replicate_table.t0 exists table force_replicate_table.t1 exists table force_replicate_table.t2 exists table force_replicate_table.t3 not exists for 1-th check, retry later check diff successfully Debugger for raftstore-v2 is used Debugger for raftstore-v2 is used ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 8-th check, retry later table force_replicate_table.t3 exists table force_replicate_table.t4 not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table mark.finish_mark_2 not exists for 2-th check, retry later check diff failed 1-th time, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 9-th check, retry later table force_replicate_table.t4 exists table force_replicate_table.t5 not exists for 1-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_session_done_during_task/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... table mark.finish_mark_2 not exists for 3-th check, retry later check diff failed 2-th time, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39a5b00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:7059, start at 2024-05-04 16:52:54.804255008 +0800 CST m=+5.065650419 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:54.810 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:54.814 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:54.814 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39a5b00016 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:7059, start at 2024-05-04 16:52:54.804255008 +0800 CST m=+5.065650419 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:54.810 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:54.814 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:54.814 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39a7e8000f Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-3bwq1-gzbp1, pid:7146, start at 2024-05-04 16:52:54.922605237 +0800 CST m=+5.131127509 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:54.929 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:54.906 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:54.906 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table test.t2 not exists for 10-th check, retry later table force_replicate_table.t5 exists table force_replicate_table.t6 not exists for 1-th check, retry later table mark.finish_mark_2 not exists for 4-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [2024/05/04 16:52:50.872 +08:00] [INFO] [case.go:115] ["sync updatePKUK take: 12.679659506s"] [Sat May 4 16:52:58 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.85898591.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table test.t2 not exists for 11-th check, retry later check diff failed 3-th time, retry later \033[0;36m<<< Run all test success >>>\033[0m VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39b0280015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:7496, start at 2024-05-04 16:52:55.473798082 +0800 CST m=+5.291879217 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:55.481 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:55.483 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:55.483 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39b0280015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:7496, start at 2024-05-04 16:52:55.473798082 +0800 CST m=+5.291879217 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:55.481 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:55.483 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:55.483 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39b1bc0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-sj989-z6hpm, pid:7578, start at 2024-05-04 16:52:55.571181586 +0800 CST m=+5.337838662 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:55.578 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:55.535 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:55.535 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/multi_source/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_source/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } start tidb cluster in /tmp/tidb_cdc_test/capture_session_done_during_task Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
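The "curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info" fragments traced above are the harness waiting for a newly launched cdc server to publish its capture and etcd metadata before the test continues. A condensed sketch of that loop, assuming the same endpoint, basic-auth credentials, 50-attempt limit, and 3-second sleep shown in the trace:

    # Poll the cdc server's debug endpoint until it reports "etcd info",
    # mirroring the set -x trace above; the URL and credentials are the ones
    # used by this run.
    for i in $(seq 0 50); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server reported an error" >&2
            exit 1
        fi
        if echo "$res" | grep -q 'etcd info'; then
            break                      # server is up; proceed with the test
        fi
        [ "$i" -eq 50 ] && exit 1      # give up after the last attempt
        sleep 3
    done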
[Pipeline] } table force_replicate_table.t6 exists check_data_subset force_replicate_table.t0 127.0.0.1 4000 127.0.0.1 3306 table mark.finish_mark_2 not exists for 5-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) run task successfully check_data_subset force_replicate_table.t1 127.0.0.1 4000 127.0.0.1 3306 Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) table test.t2 not exists for 12-th check, retry later [Pipeline] // cache [Pipeline] } check diff failed 4-th time, retry later [Pipeline] // dir /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh: line 1: 14409 Killed cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/new_changefeed.toml" 2>&1 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... [Pipeline] } [Pipeline] // withCredentials [Pipeline] } run task successfully check_data_subset force_replicate_table.t2 127.0.0.1 4000 127.0.0.1 3306 [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8919.out cli tso query --pd=http://127.0.0.1:2379 [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } run task successfully check_data_subset force_replicate_table.t3 127.0.0.1 4000 127.0.0.1 3306 [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:01 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8c7ba237-d04c-4eca-aad5-e70d51387681 {"id":"8c7ba237-d04c-4eca-aad5-e70d51387681","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812778} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c2c9 8c7ba237-d04c-4eca-aad5-e70d51387681 /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8c7ba237-d04c-4eca-aad5-e70d51387681 {"id":"8c7ba237-d04c-4eca-aad5-e70d51387681","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812778} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c2c9 8c7ba237-d04c-4eca-aad5-e70d51387681 /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/8c7ba237-d04c-4eca-aad5-e70d51387681 {"id":"8c7ba237-d04c-4eca-aad5-e70d51387681","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812778} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c2c9 8c7ba237-d04c-4eca-aad5-e70d51387681 /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.cli.8646.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-only-block-related-table Create changefeed successfully! 
ID: ddl-only-block-related-table Info: {"upstream_id":7365064747669194903,"namespace":"default","id":"ddl-only-block-related-table","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:53:01.602273361+08:00","start_ts":449527881784098821,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527881784098821,"checkpoint_ts":449527881784098821,"checkpoint_time":"2024-05-04 16:53:01.464"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... table mark.finish_mark_2 not exists for 6-th check, retry later run task successfully check_data_subset force_replicate_table.t4 127.0.0.1 4000 127.0.0.1 3306 Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 5-th time, retry later + set +x + tso='449527881618948097 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527881618948097 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 16:53:02 CST 2024] <<<<<< START cdc server in multi_source case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + GO_FAILPOINTS= + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.89638965.out server --log-file /tmp/tidb_cdc_test/multi_source/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_source/cdc_data --cluster-id default ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 run task successfully check_data_subset force_replicate_table.t5 127.0.0.1 4000 127.0.0.1 3306 table test.t2 exists check diff successfully + set +x wait process cdc.test exit for 1-th time... run task successfully check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 [Sat May 4 16:53:03 CST 2024] <<<<<< START kafka consumer in ddl_only_block_related_table case >>>>>> ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) id=19,a=NULL doesn't exist in downstream table force_replicate_table.t6 run task failed 1-th time, retry later table ddl_only_block_related_table.finish_mark not exists for 1-th check, retry later wait process cdc.test exit for 2-th time... table mark.finish_mark_2 not exists for 7-th check, retry later wait process cdc.test exit for 3-th time... Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:53:04 CST 2024] <<<<<< run test case resolve_lock success! >>>>>> start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... check diff successfully Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
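The set +x fragment above shows how a changefeed start-ts is obtained: "cdc cli tso query --pd=..." prints the current TSO plus a PASS coverage line (the binary is the coverage-instrumented cdc.test), and awk keeps only the first field. A rough stand-alone equivalent, with illustrative variable names:

    # Fetch a TSO from PD through the cdc CLI and keep only the number;
    # the coverage line printed by cdc.test is dropped by head/awk.
    pd_addr="http://127.0.0.1:2379"
    tso_output=$(cdc cli tso query --pd="$pd_addr")
    start_ts=$(echo "$tso_output" | head -n1 | awk '{print $1}')
    echo "using --start-ts=$start_ts"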
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 id=7,a=NULL doesn't exist in downstream table force_replicate_table.t6 run task failed 2-th time, retry later table mark.finish_mark_2 not exists for 8-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:05 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 {"id":"e32bdbcf-5e6e-4a47-ab32-2b0892bddb70","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812782} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce4972d5 e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 /tidb/cdc/default/default/upstream/7365064753955462020 {"id":7365064753955462020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 {"id":"e32bdbcf-5e6e-4a47-ab32-2b0892bddb70","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812782} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce4972d5 e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 /tidb/cdc/default/default/upstream/7365064753955462020 {"id":7365064753955462020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 {"id":"e32bdbcf-5e6e-4a47-ab32-2b0892bddb70","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812782} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce4972d5 e32bdbcf-5e6e-4a47-ab32-2b0892bddb70 /tidb/cdc/default/default/upstream/7365064753955462020 {"id":7365064753955462020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.9021.out cli changefeed create 
--start-ts=449527881618948097 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-source-test-15267?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_only_block_related_table.finish_mark not exists for 2-th check, retry later Create changefeed successfully! ID: 51de7469-9bc0-4ab7-83d2-dc2a79ae680b Info: {"upstream_id":7365064753955462020,"namespace":"default","id":"51de7469-9bc0-4ab7-83d2-dc2a79ae680b","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-source-test-15267?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:53:05.883039374+08:00","start_ts":449527881618948097,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527881618948097,"checkpoint_ts":449527881618948097,"checkpoint_time":"2024-05-04 16:53:00.834"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
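For reference, the changefeed created above corresponds to a CLI invocation of roughly the following shape; the topic name, start-ts, and Kafka parameters are the ones generated for this run and would differ in another execution.

    # Create a changefeed whose sink is the test's Kafka topic, pinning the
    # start-ts captured earlier (values copied from the log above).
    SINK_URI='kafka://127.0.0.1:9092/ticdc-multi-source-test-15267?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
    cdc cli changefeed create \
        --start-ts=449527881618948097 \
        --sink-uri="$SINK_URI"
    # A fixed changefeed ID can be supplied with -c, as the
    # ddl-only-block-related-table case earlier in this log does.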
check diff failed 1-th time, retry later \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } + set +x [Sat May 4 16:53:07 CST 2024] <<<<<< START kafka consumer in multi_source case >>>>>> go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading go.uber.org/atomic v1.11.0 [Pipeline] // withEnv [Pipeline] } table mark.finish_mark_2 not exists for 9-th check, retry later [Pipeline] // stage [Pipeline] } go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading github.com/coreos/go-semver v0.3.1 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading golang.org/x/net v0.24.0 go: downloading github.com/golang/protobuf v1.5.4 go: downloading golang.org/x/sys v0.19.0 go: downloading google.golang.org/protobuf v1.33.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda table ddl_only_block_related_table.finish_mark not exists for 3-th check, retry later go: downloading golang.org/x/text v0.14.0 check diff failed 2-th time, retry later Verifying downstream PD is started... Starting Upstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... 
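The "<<<<<< START kafka consumer ... >>>>>>" markers are where the harness launches the consumer that reads the changefeed's Kafka topic and replays it into the downstream database; the invocation that was killed earlier in this log shows the arguments it takes. A hedged sketch, with SINK_URI, UP_TIDB_HOST, UP_TIDB_PORT, and CUR supplied by the harness, and with the config file name and log path chosen only for illustration:

    # Start the test's Kafka consumer in the background: read the sink topic
    # and apply the messages to the downstream MySQL/TiDB.
    cdc_kafka_consumer \
        --upstream-uri "$SINK_URI" \
        --downstream-uri "mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" \
        --upstream-tidb-dsn "root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" \
        --config "$CUR/conf/changefeed.toml" \
        >/tmp/kafka_consumer.log 2>&1 &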
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) table ddl_only_block_related_table.finish_mark not exists for 4-th check, retry later run task successfully table mark.finish_mark_2 exists table mark.finish_mark_3 not exists for 1-th check, retry later go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/scalalang2/golang-fifo v0.1.5 go: downloading github.com/tidwall/btree v1.7.0 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/gorilla/mux v1.8.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading cloud.google.com/go v0.112.2 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/google/btree 
v1.1.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading golang.org/x/tools v0.20.0 go: downloading go.uber.org/mock v0.4.0 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/cockroachdb/pebble v1.1.0 go: downloading github.com/jfcg/sorty/v2 v2.1.0 go: downloading github.com/carlmjohnson/flagext v0.21.0 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/dgraph-io/ristretto v0.1.1 go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2 go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading github.com/otiai10/copy v1.2.0 go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading github.com/spkg/bom v1.0.0 go: downloading github.com/xitongsys/parquet-go v1.6.0 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815 Starting Upstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3a83f0000d Current GC worker leader UUID. 
(DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:21325, start at 2024-05-04 16:53:09.001710805 +0800 CST m=+5.272133400 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:09.009 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:08.988 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:08.988 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f go: downloading github.com/dolthub/maphash v0.1.0 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/jfcg/sixb v1.3.8 go: downloading github.com/cheggaaa/pb/v3 v3.0.8 go: downloading github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/robfig/cron/v3 v3.0.1 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading github.com/robfig/cron v1.2.0 go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989 go: downloading github.com/mattn/go-runewidth v0.0.15 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/kr/text v0.2.0 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading github.com/VividCortex/ewma v1.2.0 go: downloading github.com/fatih/color v1.16.0 go: downloading github.com/mattn/go-colorable v0.1.13 go: downloading github.com/mattn/go-isatty v0.0.20 go: downloading github.com/apache/thrift v0.16.0 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/lestrrat-go/blackmagic v1.0.2 go: downloading github.com/lestrrat-go/httprc v1.0.5 go: downloading github.com/lestrrat-go/iter v1.0.2 go: downloading github.com/lestrrat-go/option v1.0.1 go: downloading github.com/golang/glog v1.2.0 go: downloading github.com/dustin/go-humanize v1.0.1 go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/rivo/uniseg v0.4.7 go: downloading github.com/lestrrat-go/httpcc v1.0.1 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2 go: downloading github.com/ncw/directio v1.0.5 go: downloading github.com/klauspost/cpuid v1.3.1 go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible check diff failed 3-th 
time, retry later go: downloading github.com/go-logr/logr v1.4.1 go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/stdr v1.2.2 wait process cdc.test exit for 1-th time... go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 go: downloading github.com/DataDog/zstd v1.5.5 wait process cdc.test exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process cdc.test exit for 3-th time... table ddl_only_block_related_table.finish_mark exists table mark.finish_mark_3 not exists for 2-th check, retry later wait process 8594 exit for 1-th time... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3a83f0000d Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:21325, start at 2024-05-04 16:53:09.001710805 +0800 CST m=+5.272133400 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:09.009 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:08.988 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:08.988 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3a968c0017 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-bp09q-gthp5, pid:21414, start at 2024-05-04 16:53:10.214193993 +0800 CST m=+6.431394581 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:10.221 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:10.179 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:10.179 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... 
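The "wait process ... exit for N-th time..." and "process ... already exit" pairs scattered through the log come from a stop-and-wait helper run between test cases. A simplified sketch of that pattern; the real scripts, such as the kill_cdc_pid helper mentioned just below, work from a saved PID, whereas pkill/pgrep are used here only to keep the example short:

    # Kill a process by name and poll until it is really gone, echoing the
    # same progress lines as the log. Simplified relative to the harness.
    stop_and_wait() {
        local name=$1 i=0
        pkill -f "$name" 2>/dev/null || echo "$name: no process found"
        while pgrep -f "$name" >/dev/null 2>&1; do
            i=$((i + 1))
            echo "wait process $name exit for $i-th time..."
            sleep 1
        done
        echo "process $name already exit"
    }

    stop_and_wait cdc.test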
TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/error.log arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:53:12 CST 2024] <<<<<< run test case force_replicate_table success! >>>>>> check diff failed 4-th time, retry later wait process 8594 exit for 2-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) wait process 8594 exit for 3-th time... ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8594) - No such process wait process 8594 exit for 4-th time... 
process 8594 already exit [Sat May 4 16:53:13 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)' + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.87748776.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table mark.finish_mark_3 not exists for 3-th check, retry later go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 go: downloading github.com/jmespath/go-jmespath v0.4.0 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.cli.22830.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 5-th time, retry later go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3ad64c0002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:7216, start at 2024-05-04 16:53:14.259462466 +0800 CST m=+23.279945724 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:14.266 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:14.259 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:14.259 +0800 All versions after safe point can be accessed. (DO NOT EDIT) VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. 
tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3ad64c0002 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:7216, start at 2024-05-04 16:53:14.259462466 +0800 CST m=+23.279945724 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:14.266 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:14.259 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:14.259 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b39d90c0015 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-9z3t4-2gjxq, pid:7301, start at 2024-05-04 16:52:58.088969003 +0800 CST m=+7.058501874 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:54:58.097 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:52:58.101 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:42:58.101 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
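The cdc restart traced a few lines above sets GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)' before launching the server; that is how these tests inject faults into the failpoint-instrumented cdc.test binary. A minimal sketch of the same idea, with the server flags taken from the trace and the file paths shortened for readability:

    # Enable a failpoint for a single cdc server process via the environment,
    # then start the server with the flags seen in the trace (paths shortened).
    export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)'
    cdc.test -test.coverprofile=/tmp/cov.out server \
        --log-file /tmp/cdc.log \
        --log-level debug \
        --data-dir /tmp/cdc_data \
        --cluster-id default &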
Logging trace to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/error.log arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } table mark.finish_mark_3 not exists for 4-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3ae7a80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:15634, start at 2024-05-04 16:53:15.40240644 +0800 CST m=+5.193365117 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:15.409 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:15.370 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:15.370 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x + tso='449527885263798273 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527885263798273 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... 
+ awk -F ' ' '{print $1}' + set +x [Sat May 4 16:53:16 CST 2024] <<<<<< START cdc server in capture_session_done_during_task case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)' + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.2287622878.out server --log-file /tmp/tidb_cdc_test/capture_session_done_during_task/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/capture_session_done_during_task/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/move_table/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... check diff successfully + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:16 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365064747669194903 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:01.602273361 +0800 CST StartTs:449527881784098821 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003454630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527881810313221} {CheckpointTs:449527885532758032 MinTableBarrierTs:449527885532758036 AdminJobType:noop} span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449527885532758032, checkpointTs: 
449527885532758032, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/472b69e4-c800-4e63-b9db-4483e319779b {"id":"472b69e4-c800-4e63-b9db-4483e319779b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812793} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c3a0 472b69e4-c800-4e63-b9db-4483e319779b /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7365064747669194903,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:01.602273361+08:00","start-ts":449527881784098821,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527881810313221} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449527885532758032,"min-table-barrier-ts":449527885532758036,"admin-job-type":0} /tidb/cdc/default/default/task/position/472b69e4-c800-4e63-b9db-4483e319779b/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table 
{UpstreamID:7365064747669194903 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:01.602273361 +0800 CST StartTs:449527881784098821 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003454630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527881810313221} {CheckpointTs:449527885532758032 MinTableBarrierTs:449527885532758036 AdminJobType:noop} span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449527885532758032, checkpointTs: 449527885532758032, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/472b69e4-c800-4e63-b9db-4483e319779b {"id":"472b69e4-c800-4e63-b9db-4483e319779b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812793} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c3a0 472b69e4-c800-4e63-b9db-4483e319779b /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table 
{"upstream-id":7365064747669194903,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:01.602273361+08:00","start-ts":449527881784098821,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527881810313221} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table {"checkpoint-ts":449527885532758032,"min-table-barrier-ts":449527885532758036,"admin-job-type":0} /tidb/cdc/default/default/task/position/472b69e4-c800-4e63-b9db-4483e319779b/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** owner info ***: *** processors info ***: changefeedID: default/ddl-only-block-related-table {UpstreamID:7365064747669194903 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:01.602273361 +0800 CST StartTs:449527881784098821 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc003454630 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527881810313221} {CheckpointTs:449527885532758032 MinTableBarrierTs:449527885532758036 AdminJobType:noop} span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 
449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449527885532758036, checkpointTs: 449527885532758036, state: Replicating span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449527885532758032, checkpointTs: 449527885532758032, state: Replicating *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/472b69e4-c800-4e63-b9db-4483e319779b {"id":"472b69e4-c800-4e63-b9db-4483e319779b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812793} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce41c3a0 472b69e4-c800-4e63-b9db-4483e319779b /tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table {"upstream-id":7365064747669194903,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-21358?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:01.602273361+08:00","start-ts":449527881784098821,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527881810313221} /tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table 
{"checkpoint-ts":449527885532758032,"min-table-barrier-ts":449527885532758036,"admin-job-type":0} /tidb/cdc/default/default/task/position/472b69e4-c800-4e63-b9db-4483e319779b/ddl-only-block-related-table {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064747669194903 {"id":7365064747669194903,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x check_ts_not_forward ddl-only-block-related-table table mark.finish_mark_3 not exists for 5-th check, retry later [Sat May 4 16:53:17 CST 2024] <<<<<< START cdc server in multi_rocks case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + GO_FAILPOINTS= + [[ no != \n\o ]] + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.2025520257.out server --log-file /tmp/tidb_cdc_test/multi_rocks/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_rocks/cdc_data --cluster-id default + (( i = 0 )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3ae7a80013 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:15634, start at 2024-05-04 16:53:15.40240644 +0800 CST m=+5.193365117 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:15.409 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:15.370 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:15.370 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3ae85c0014 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-r5bkm-0m844, pid:15716, start at 2024-05-04 16:53:15.459978825 +0800 CST m=+5.195295222 Host name and pid of current GC leader. 
(DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:15.467 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:15.465 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:15.465 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... check diff failed 1-th time, retry later TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... Logging trace to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/error.log arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:19 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/725731af-15f4-4bc1-b4bb-d1a6c67a50ab {"id":"725731af-15f4-4bc1-b4bb-d1a6c67a50ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812796} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce7888f1 725731af-15f4-4bc1-b4bb-d1a6c67a50ab /tidb/cdc/default/default/upstream/7365064810719194241 {"id":7365064810719194241,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/725731af-15f4-4bc1-b4bb-d1a6c67a50ab {"id":"725731af-15f4-4bc1-b4bb-d1a6c67a50ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812796} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce7888f1 725731af-15f4-4bc1-b4bb-d1a6c67a50ab /tidb/cdc/default/default/upstream/7365064810719194241 {"id":7365064810719194241,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/725731af-15f4-4bc1-b4bb-d1a6c67a50ab {"id":"725731af-15f4-4bc1-b4bb-d1a6c67a50ab","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812796} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce7888f1 725731af-15f4-4bc1-b4bb-d1a6c67a50ab /tidb/cdc/default/default/upstream/7365064810719194241 {"id":7365064810719194241,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x start tidb cluster in /tmp/tidb_cdc_test/move_table Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
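The repeated "+ curl -vsL ... /debug/info" and "+ grep -q ..." trace above is the shared wait-for-cdc-server loop: it polls the debug endpoint until the response contains the owner/processor/etcd metadata. A condensed sketch of that loop, reconstructed from the trace (the 50-attempt budget and 3-second sleep come from the "(( i <= 50 ))" and "sleep 3" lines; everything else is as shown):

curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
for ((i = 0; i <= 50; i++)); do
    res=$($curl_status_cmd 2>&1)
    # "failed to get info:" means the HTTP server is up but the capture is not healthy yet
    if echo "$res" | grep -q 'failed to get info:'; then res=''; fi
    # once the owner/processors/etcd dump comes back, the server is ready
    if echo "$res" | grep -q 'etcd info'; then break; fi
    if [ "$i" -eq 50 ]; then echo 'cdc server failed to start in time'; exit 1; fi
    sleep 3
done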
table mark.finish_mark_3 not exists for 6-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > check diff failed 2-th time, retry later < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:20 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 {"id":"e56ae00c-8391-4377-a3d7-ea43d4f9cbb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812797} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce42b5fa e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 /tidb/cdc/default/default/upstream/7365064748513939769 {"id":7365064748513939769,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 {"id":"e56ae00c-8391-4377-a3d7-ea43d4f9cbb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812797} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce42b5fa e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 /tidb/cdc/default/default/upstream/7365064748513939769 {"id":7365064748513939769,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 {"id":"e56ae00c-8391-4377-a3d7-ea43d4f9cbb1","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812797} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce42b5fa e56ae00c-8391-4377-a3d7-ea43d4f9cbb1 /tidb/cdc/default/default/upstream/7365064748513939769 {"id":7365064748513939769,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.cli.20721.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-rocks-test-29312?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' [Sat May 4 16:53:20 CST 2024] <<<<<< START kafka consumer in 
capture_session_done_during_task case >>>>>> lease 22318f42ce7888f1 revoked Create changefeed successfully! ID: 9bbb2783-9328-411f-a5ce-b07b020a3768 Info: {"upstream_id":7365064748513939769,"namespace":"default","id":"9bbb2783-9328-411f-a5ce-b07b020a3768","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-rocks-test-29312?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:53:20.872784479+08:00","start_ts":449527886829060101,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527886829060101,"checkpoint_ts":449527886829060101,"checkpoint_time":"2024-05-04 16:53:20.709"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... [Sat May 4 16:53:20 CST 2024] <<<<<< START cdc server in kafka_column_selector case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.1708917091.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector/cdc_data --cluster-id default + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table capture_session_done_during_task.t exists Verifying downstream PD is started... Starting Upstream TiKV... 
TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release Starting Downstream TiKV... TiKV Release Version: 8.2.0-alpha Edition: Community Git Commit Hash: 72a0fd5b00235a7c56014b77ddd933e2a0d33c88 Git Commit Branch: master UTC Build Time: 2024-04-30 02:23:51 Rust Version: rustc 1.77.0-nightly (89e2160c4 2023-12-27) Enable Features: memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored Profile: dist_release check diff failed 1-th time, retry later table mark.finish_mark_3 not exists for 7-th check, retry later run task failed 1-th time, retry later + set +x [Sat May 4 16:53:22 CST 2024] <<<<<< START kafka consumer in multi_rocks case >>>>>> check diff failed 3-th time, retry later ***************** properties ***************** "mysql.db"="multi_rocks" "recordcount"="1000" "table"="a1" "updateproportion"="0" "mysql.port"="4000" "readallfields"="true" "dotransactions"="false" "scanproportion"="0" "requestdistribution"="uniform" "mysql.host"="127.0.0.1" "readproportion"="0" "operationcount"="0" "insertproportion"="0" "threadcount"="2" "mysql.user"="root" "workload"="core" ********************************************** Starting Upstream TiDB... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > table mark.finish_mark_3 not exists for 8-th check, retry later Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Starting Downstream TiDB... Release Version: v8.2.0-alpha-79-g600b2ed4bf Edition: Community Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c Git Branch: master UTC Build Time: 2024-05-01 02:56:48 GoVersion: go1.21.6 Race Enabled: false Check Table Before Drop: false Store: unistore Verifying Upstream TiDB is started... 
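The "***************** properties *****************" blocks scattered through this run are go-ycsb echoing its effective configuration before loading rows into the upstream cluster. A sketch of an equivalent invocation for the multi_rocks a1 table dumped above (assumptions: the suite drives go-ycsb with per-property -p overrides and the binary is on PATH; a property file passed with -P would work the same way):

# illustrative go-ycsb load matching the a1 properties dump above
go-ycsb load mysql \
    -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root -p mysql.db=multi_rocks \
    -p table=a1 -p workload=core -p recordcount=1000 -p operationcount=0 -p threadcount=2 \
    -p readproportion=0 -p updateproportion=0 -p scanproportion=0 -p insertproportion=0 \
    -p readallfields=true -p dotransactions=false -p requestdistribution=uniform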
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:23 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/84a8612d-3698-4203-92f3-e47acb1a4182 {"id":"84a8612d-3698-4203-92f3-e47acb1a4182","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812801} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce8fe7ce 84a8612d-3698-4203-92f3-e47acb1a4182 /tidb/cdc/default/default/upstream/7365064839201833775 {"id":7365064839201833775,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/84a8612d-3698-4203-92f3-e47acb1a4182 {"id":"84a8612d-3698-4203-92f3-e47acb1a4182","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812801} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce8fe7ce 84a8612d-3698-4203-92f3-e47acb1a4182 /tidb/cdc/default/default/upstream/7365064839201833775 {"id":7365064839201833775,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/84a8612d-3698-4203-92f3-e47acb1a4182 {"id":"84a8612d-3698-4203-92f3-e47acb1a4182","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812801} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ce8fe7ce 84a8612d-3698-4203-92f3-e47acb1a4182 /tidb/cdc/default/default/upstream/7365064839201833775 {"id":7365064839201833775,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.cli.17150.out cli changefeed create --start-ts=449527886805729281 '--sink-uri=kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/conf/changefeed.toml Run finished, takes 544.529568ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 1872.4, Avg(us): 1058, Min(us): 771, Max(us): 10103, 95th(us): 2000, 99th(us): 2000 check_ts_not_forward ddl-only-block-related-table [2024/05/04 16:53:20.761 +08:00] [WARN] [diff.go:182] ["table struct is not equal"] [reason="column num not equal, one is 5 another is 4"] check diff failed 2-th 
time, retry later \033[0;36m<<< Run all test success >>>\033[0m Create changefeed successfully! ID: test Info: {"upstream_id":7365064839201833775,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json\u0026partition-num=1\u0026enable-tidb-extension=true","create_time":"2024-05-04T16:53:24.238678558+08:00","start_ts":449527886805729281,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.t1"],"columns":["a","b"]},{"matcher":["test.*"],"columns":["*","!b"]},{"matcher":["test1.t1"],"columns":["column*","!column1"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527886805729281,"checkpoint_ts":449527886805729281,"checkpoint_time":"2024-05-04 16:53:20.620"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
[Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) ***************** properties ***************** "mysql.user"="root" "recordcount"="1000" "readallfields"="true" "operationcount"="0" "threadcount"="2" "mysql.port"="4000" "mysql.db"="multi_rocks" "updateproportion"="0" "insertproportion"="0" "mysql.host"="127.0.0.1" "workload"="core" "readproportion"="0" "requestdistribution"="uniform" "table"="a2" "dotransactions"="false" "scanproportion"="0" ********************************************** [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } [Pipeline] // withCredentials [Pipeline] } [Pipeline] // timeout [Pipeline] } check diff failed 4-th time, retry later [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } Run finished, takes 569.241629ms INSERT - Takes(s): 0.6, Count: 1000, OPS: 1804.1, Avg(us): 1106, Min(us): 875, Max(us): 14486, 95th(us): 2000, 99th(us): 2000 [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } ***************** properties ***************** "operationcount"="0" "mysql.db"="multi_rocks" "scanproportion"="0" "mysql.host"="127.0.0.1" "dotransactions"="false" "mysql.user"="root" "workload"="core" "requestdistribution"="uniform" "readallfields"="true" "readproportion"="0" "threadcount"="2" "updateproportion"="0" "recordcount"="1000" "mysql.port"="4000" "table"="a3" "insertproportion"="0" ********************************************** table mark.finish_mark_3 not exists for 9-th check, retry later ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) + set +x Run finished, takes 548.929288ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 1873.1, Avg(us): 1069, Min(us): 778, Max(us): 14976, 95th(us): 2000, 99th(us): 3000 check diff failed 3-th time, retry later Starting build checksum checker... 
go: downloading go.uber.org/zap v1.27.0 go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4 go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d go: downloading golang.org/x/net v0.24.0 go: downloading github.com/spf13/cobra v1.8.0 go: downloading github.com/BurntSushi/toml v1.3.2 go: downloading github.com/go-sql-driver/mysql v1.7.1 go: downloading google.golang.org/grpc v1.62.1 go: downloading github.com/klauspost/compress v1.17.8 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e go: downloading github.com/stretchr/testify v1.9.0 go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5 go: downloading golang.org/x/time v0.5.0 go: downloading github.com/prometheus/client_golang v1.19.0 go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading golang.org/x/sync v0.7.0 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/modern-go/reflect2 v1.0.2 go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading cloud.google.com/go/storage v1.39.1 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1 go: downloading github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/aws/aws-sdk-go v1.50.0 go: downloading github.com/json-iterator/go v1.1.12 go: downloading github.com/shirou/gopsutil/v3 v3.24.2 go: downloading github.com/gogo/protobuf v1.3.2 go: downloading github.com/coreos/go-semver v0.3.1 go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754 go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading golang.org/x/crypto v0.22.0 go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b go: downloading github.com/spf13/pflag v1.0.5 go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading golang.org/x/text v0.14.0 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/mattn/go-isatty v0.0.20 check diff failed 5-th time, retry later ***************** properties ***************** "table"="a4" "mysql.user"="root" "readallfields"="true" "scanproportion"="0" "mysql.port"="4000" "requestdistribution"="uniform" "operationcount"="0" "workload"="core" "updateproportion"="0" "dotransactions"="false" "insertproportion"="0" "mysql.db"="multi_rocks" "readproportion"="0" "threadcount"="2" "recordcount"="1000" "mysql.host"="127.0.0.1" ********************************************** go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd go: downloading gopkg.in/yaml.v3 v3.0.1 go: downloading cloud.google.com/go v0.112.2 go: downloading 
github.com/coreos/go-systemd/v22 v22.5.0 go: downloading github.com/docker/go-units v0.5.0 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading golang.org/x/sys v0.19.0 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading go.uber.org/multierr v1.11.0 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading google.golang.org/protobuf v1.33.0 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/beorn7/perks v1.0.1 go: downloading github.com/cespare/xxhash/v2 v2.3.0 go: downloading github.com/prometheus/client_model v0.6.1 go: downloading github.com/prometheus/common v0.52.2 go: downloading github.com/prometheus/procfs v0.13.0 go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/pkg/errors v0.9.1 go: downloading github.com/golang/protobuf v1.5.4 go: downloading cloud.google.com/go/compute/metadata v0.2.3 go: downloading cloud.google.com/go/iam v1.1.7 go: downloading cloud.google.com/go/compute v1.25.1 go: downloading github.com/google/uuid v1.6.0 go: downloading github.com/googleapis/gax-go/v2 v2.12.3 go: downloading golang.org/x/oauth2 v0.18.0 go: downloading google.golang.org/api v0.170.0 go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda Run finished, takes 531.640962ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 1940.1, Avg(us): 1032, Min(us): 741, Max(us): 20386, 95th(us): 2000, 99th(us): 2000 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/golang/snappy v0.0.4 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/DataDog/zstd v1.5.5 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac go: downloading go.opentelemetry.io/otel v1.24.0 go: downloading go.opentelemetry.io/otel/trace v1.24.0 go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda go: downloading github.com/opentracing/opentracing-go v1.2.0 go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/99designs/keyring v1.2.1 go: downloading 
github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 table mark.finish_mark_3 not exists for 10-th check, retry later go: downloading golang.org/x/term v0.19.0 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da go: downloading go.opentelemetry.io/otel/metric v1.24.0 go: downloading github.com/go-logr/logr v1.4.1 go: downloading github.com/go-logr/stdr v1.2.2 ***************** properties ***************** "threadcount"="2" "dotransactions"="false" "mysql.host"="127.0.0.1" "mysql.user"="root" "mysql.db"="multi_rocks" "insertproportion"="0" "updateproportion"="0" "mysql.port"="4000" "scanproportion"="0" "requestdistribution"="uniform" "operationcount"="0" "table"="a5" "readproportion"="0" "workload"="core" "readallfields"="true" "recordcount"="1000" ********************************************** check diff successfully check diff failed 1-th time, retry later Run finished, takes 510.050402ms INSERT - Takes(s): 0.5, Count: 1000, OPS: 2036.0, Avg(us): 991, Min(us): 735, Max(us): 18787, 95th(us): 2000, 99th(us): 2000 ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) go: downloading github.com/ardielle/ardielle-go v1.5.2 check diff successfully go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548 go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8 go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0 go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581 go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1 go: downloading github.com/go-resty/resty/v2 v2.11.0 go: downloading github.com/coocood/freecache v1.2.1 go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9 go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50 go: downloading github.com/google/btree v1.1.2 go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a go: downloading github.com/jellydator/ttlcache/v3 v3.0.1 go: downloading go.etcd.io/etcd/client/v3 v3.5.12 go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5 go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible go: downloading github.com/dolthub/swiss v0.2.1 go: downloading github.com/twmb/murmur3 v1.1.6 go: downloading go.etcd.io/etcd/api/v3 v3.5.12 go: downloading github.com/opentracing/basictracer-go v1.1.0 go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22 go: downloading github.com/influxdata/tdigest v0.0.1 go: downloading github.com/cloudfoundry/gosigar v1.3.6 go: downloading golang.org/x/tools v0.20.0 go: downloading github.com/cockroachdb/errors v1.11.1 go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7 go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13 go: downloading gopkg.in/yaml.v2 v2.4.0 go: downloading github.com/dolthub/maphash v0.1.0 go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117 go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec go: downloading github.com/tklauser/go-sysconf v0.3.12 go: downloading 
github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1 go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4 go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12 go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 go: downloading github.com/tklauser/numcpus v0.6.1 go: downloading github.com/kylelemons/godebug v1.1.0 go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c go: downloading github.com/golang-jwt/jwt/v5 v5.2.0 go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible go: downloading github.com/cockroachdb/redact v1.1.5 go: downloading github.com/getsentry/sentry-go v0.27.0 go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b go: downloading github.com/kr/pretty v0.3.1 go: downloading github.com/rogpeppe/go-internal v1.12.0 go: downloading github.com/kr/text v0.2.0 table mark.finish_mark_3 not exists for 11-th check, retry later table multi_rocks.finish_mark not exists for 1-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3bbae40010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:23896, start at 2024-05-04 16:53:28.908623002 +0800 CST m=+5.226045823 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:28.914 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:28.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:28.889 +0800 All versions after safe point can be accessed. (DO NOT EDIT) ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111) check diff failed 1-th time, retry later check diff successfully wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... table mark.finish_mark_3 not exists for 12-th check, retry later table multi_rocks.finish_mark not exists for 2-th check, retry later VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3bbae40010 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:23896, start at 2024-05-04 16:53:28.908623002 +0800 CST m=+5.226045823 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:28.914 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. 
If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:28.889 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:28.889 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Verifying Downstream TiDB is started... VARIABLE_NAME VARIABLE_VALUE COMMENT bootstrapped True Bootstrap flag. Do not delete. tidb_server_version 196 Bootstrap version. Do not delete. system_tz Asia/Shanghai TiDB Global System Timezone. new_collation_enabled True If the new collations are enabled. Do not edit it. ddl_table_version 3 DDL Table Version. Do not delete. tikv_gc_leader_uuid 63d0b3bc6840006 Current GC worker leader UUID. (DO NOT EDIT) tikv_gc_leader_desc host:pingcap-tiflow-pull-cdc-integration-kafka-test-1836-tgv7p-w9730, pid:23978, start at 2024-05-04 16:53:29.636522342 +0800 CST m=+5.899933497 Host name and pid of current GC leader. (DO NOT EDIT) tikv_gc_leader_lease 20240504-16:55:29.642 +0800 Current GC worker leader lease. (DO NOT EDIT) tikv_gc_auto_concurrency true Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used tikv_gc_enable true Current GC enable status tikv_gc_run_interval 10m0s GC run interval, at least 10m, in Go format. tikv_gc_life_time 10m0s All versions within life time will not be collected by GC, at least 10m, in Go format. tikv_gc_last_run_time 20240504-16:53:29.633 +0800 The time when last GC starts. (DO NOT EDIT) tikv_gc_safe_point 20240504-16:43:29.633 +0800 All versions after safe point can be accessed. (DO NOT EDIT) Starting Upstream TiFlash... TiFlash Release Version: v8.2.0-alpha-16-g8e170090f Edition: Community Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02 Git Branch: HEAD UTC Build Time: 2024-04-30 02:34:21 Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto Profile: RELWITHDEBINFO Compiler: clang++ 13.0.0 Raft Proxy Git Commit Hash: 7dc50b4eb06124e31f03adb06c20ff7ab61c5f79 Git Commit Branch: HEAD UTC Build Time: 2024-04-30 02:38:45 Rust Version: rustc 1.67.0-nightly (96ddd32c4 2022-11-14) Storage Engine: tiflash Prometheus Prefix: tiflash_proxy_ Profile: release Enable Features: external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored Verifying Upstream TiFlash is started... 
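The VARIABLE_NAME / VARIABLE_VALUE / COMMENT rows dumped above are the bootstrap and GC bookkeeping settings that TiDB keeps in the mysql.tidb table; the harness prints them while verifying that the upstream and downstream TiDB instances have started. A minimal sketch of reading the GC-related rows directly, assuming a mysql client on the agent and the upstream port 4000 used elsewhere in this log:

    # Sketch only: inspect the tikv_gc_* rows shown above on the upstream TiDB.
    # Host and port are taken from this log; adjust if the cluster listens elsewhere.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME LIKE 'tikv_gc%';"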
Logging trace to /tmp/tidb_cdc_test/move_table/tiflash/log/server.log Logging errors to /tmp/tidb_cdc_test/move_table/tiflash/log/error.log arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/move_table/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash --engine-label --engine-version ") } cdc.test: no process found wait process cdc.test exit for 3-th time... process cdc.test already exit [Sat May 4 16:53:31 CST 2024] <<<<<< run test case capture_session_done_during_task success! >>>>>> go: downloading github.com/jmespath/go-jmespath v0.4.0 go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2 go: downloading github.com/google/s2a-go v0.1.7 go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0 go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 go: downloading github.com/felixge/httpsnoop v1.0.4 check diff failed 2-th time, retry later table mark.finish_mark_3 not exists for 13-th check, retry later table multi_rocks.finish_mark not exists for 3-th check, retry later + pd_host=127.0.0.1 + pd_port=2379 + is_tls=false + '[' false == true ']' ++ run_cdc_cli tso query --pd=http://127.0.0.1:2379 + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25444.out cli tso query --pd=http://127.0.0.1:2379 check diff failed 3-th time, retry later table mark.finish_mark_3 not exists for 14-th check, retry later + set +x + tso='449527890402082817 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...' + echo 449527890402082817 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/... + awk -F ' ' '{print $1}' + set +x ***************** properties ***************** "insertproportion"="0" "mysql.host"="127.0.0.1" "readallfields"="true" "workload"="core" "scanproportion"="0" "requestdistribution"="uniform" "threadcount"="10" "mysql.db"="move_table" "operationcount"="0" "recordcount"="10000" "dotransactions"="false" "updateproportion"="0" "mysql.user"="root" "mysql.port"="4000" "readproportion"="0" ********************************************** table multi_rocks.finish_mark exists check diff successfully wait process cdc.test exit for 1-th time... check diff failed 4-th time, retry later wait process cdc.test exit for 2-th time... cdc.test: no process found wait process cdc.test exit for 3-th time... 
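The trace above first queries a TSO from PD through the CDC CLI (keeping only the first whitespace-separated field of the output) and then loads the move_table workload whose properties are printed next. A condensed sketch of those two steps, assuming the cdc binary is on PATH and that go-ycsb is the workload driver behind the properties dump (the ./conf/workload path is illustrative, not taken from this log):

    # 1. Query a start-ts from PD via the CDC CLI, as traced above.
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk '{print $1}')
    echo "using start-ts ${start_ts}"

    # 2. Load a small YCSB-style workload into the upstream TiDB; the -p values mirror
    #    the properties printed above. go-ycsb and the workload file are assumptions.
    go-ycsb load mysql -P ./conf/workload \
      -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
      -p mysql.db=move_table -p threadcount=10 -p recordcount=10000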
Run finished, takes 1.244365204s INSERT - Takes(s): 1.2, Count: 10000, OPS: 8063.4, Avg(us): 1204, Min(us): 769, Max(us): 5841, 95th(us): 2000, 99th(us): 2000 [Sat May 4 16:53:37 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + (( i = 0 )) + (( i <= 50 )) + GO_FAILPOINTS= + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2552925531.out server --log-file /tmp/tidb_cdc_test/move_table/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data1 --cluster-id default --addr 127.0.0.1:8300 ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8300; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table mark.finish_mark_3 not exists for 15-th check, retry later process cdc.test already exit [Sat May 4 16:53:37 CST 2024] <<<<<< run test case multi_rocks success! >>>>>> check diff failed 5-th time, retry later table mark.finish_mark_3 not exists for 16-th check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8300 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8300 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:40 GMT < Content-Length: 815 < Content-Type: text/plain; charset=utf-8 < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/default/upstream/7365064896431326437 
{"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** owner info ***: *** processors info ***: *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + break + set +x + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25586.out cli changefeed create --start-ts=449527890402082817 '--sink-uri=kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' Create changefeed successfully! ID: 29d389b7-5285-4ea5-9180-cfb488a5692b Info: {"upstream_id":7365064896431326437,"namespace":"default","id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink_uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-04T16:53:40.7872657+08:00","start_ts":449527890402082817,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-79-gc950cce3a","resolved_ts":449527890402082817,"checkpoint_ts":449527890402082817,"checkpoint_time":"2024-05-04 16:53:34.339"} PASS coverage: 2.4% of statements in github.com/pingcap/tiflow/... 
check diff successfully wait process cdc.test exit for 1-th time... table mark.finish_mark_3 not exists for 17-th check, retry later wait process cdc.test exit for 2-th time... [2024/05/04 16:53:39.464 +08:00] [INFO] [dailytest.go:68] ["test pass!!!"] wait process cdc.test exit for 1-th time... wait process cdc.test exit for 2-th time... wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:53:41 CST 2024] <<<<<< run test case cdc success! >>>>>> + set +x [Sat May 4 16:53:42 CST 2024] <<<<<< START kafka consumer in move_table case >>>>>> [Sat May 4 16:53:42 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2562725629.out server --log-file /tmp/tidb_cdc_test/move_table/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data2 --cluster-id default --addr 127.0.0.1:8301 ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8301; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 wait process cdc.test exit for 3-th time... cdc.test: no process found wait process cdc.test exit for 4-th time... process cdc.test already exit [Sat May 4 16:53:43 CST 2024] <<<<<< run test case changefeed_pause_resume success! >>>>>> table mark.finish_mark_3 not exists for 18-th check, retry later =================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_attributes/run.sh using Sink-Type: kafka... <<================= The 1 times to try to start tidb cluster... + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8301 (#0) * Trying 127.0.0.1... 
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8301 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:45 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001a00900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} {CheckpointTs:449527890834358346 MinTableBarrierTs:449527893128380421 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b 
{"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527890834358346,"min-table-barrier-ts":449527893128380421,"admin-job-type":0} /tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001a00900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} 
{CheckpointTs:449527890834358346 MinTableBarrierTs:449527893128380421 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b {"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527890834358346,"min-table-barrier-ts":449527893128380421,"admin-job-type":0} 
/tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001a00900 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} {CheckpointTs:449527890834358346 MinTableBarrierTs:449527893128380421 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b 
{"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527890834358346,"min-table-barrier-ts":449527893128380421,"admin-job-type":0} /tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x [Sat May 4 16:53:45 CST 2024] <<<<<< START cdc server in move_table case >>>>>> + [[ '' == \t\r\u\e ]] + set +e + get_info_fail_msg='failed to get info:' + etcd_info_msg='etcd info' + '[' -z '' ']' + curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL' + [[ no != \n\o ]] + GO_FAILPOINTS= + (( i = 0 )) + (( i <= 50 )) + cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2573525737.out server --log-file /tmp/tidb_cdc_test/move_table/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data3 --cluster-id 
default --addr 127.0.0.1:8302 ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connection refused * Failed connect to 127.0.0.1:8302; Connection refused * Closing connection 0 + res= + echo '' + grep -q 'failed to get info:' + echo '' + grep -q 'etcd info' + '[' 0 -eq 50 ']' + sleep 3 table mark.finish_mark_3 not exists for 19-th check, retry later ++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced % Total % Received % Xferd Average Speed Time Time Time Current Dload Upload Total Spent Left Speed 0 0 0 0 0 0 0 0 --:--:-- --:--:-- --:--:-- 0 100 221 100 221 0 0 2573 0 --:--:-- --:--:-- --:--:-- 2600 + synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-04 16:53:33.480","puller_resolved_ts":"2024-05-04 16:53:26.481","last_synced_ts":"2024-05-04 16:51:18.180","now_ts":"2024-05-04 16:53:35.000","info":"Data syncing is finished"}' ++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-04' '16:53:33.480","puller_resolved_ts":"2024-05-04' '16:53:26.481","last_synced_ts":"2024-05-04' '16:51:18.180","now_ts":"2024-05-04' '16:53:35.000","info":"Data' syncing is 'finished"}' ++ jq .synced + status=true + '[' true '!=' true ']' + kill_pd ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo + info='jenkins 9966 7.8 0.0 13718932 142680 ? Sl 16:50 0:12 pd-server --advertise-client-urls http://127.0.0.1:2379 --client-urls http://0.0.0.0:2379 --advertise-peer-urls http://127.0.0.1:2380 --peer-urls http://0.0.0.0:2380 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/pd1.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/pd1 --name=pd1 --initial-cluster=pd1=http://127.0.0.1:2380 jenkins 10032 5.3 0.0 13522132 137356 ? 
Sl 16:50 0:08 pd-server --advertise-client-urls http://127.0.0.1:2479 --client-urls http://0.0.0.0:2479 --advertise-peer-urls http://127.0.0.1:2480 --peer-urls http://0.0.0.0:2480 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/down_pd.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/down_pd' ++ ps aux ++ grep pd-server ++ grep /tmp/tidb_cdc_test/synced_status_with_redo ++ awk '{print $2}' ++ xargs kill -9 + sleep 20 {"level":"warn","ts":1714812820.6267483,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00334ec40/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714812820.626817,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":1714812820.7512918,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0037dd180/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} {"level":"info","ts":1714812820.7513542,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":1714812821.525954,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001f46e00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"info","ts":1714812821.526027,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"} {"level":"warn","ts":"2024-05-04T16:53:45.400056+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000ec5500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T16:53:45.400193+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001230000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T16:53:45.452132+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000efe1c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table mark.finish_mark_3 not exists for 20-th 
check, retry later + (( i++ )) + (( i <= 50 )) ++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL * About to connect() to 127.0.0.1 port 8302 (#0) * Trying 127.0.0.1... * Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0) * Server auth using Basic with user 'ticdc' > GET /debug/info HTTP/1.1 > Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0 > User-Agent: curl/7.29.0 > Host: 127.0.0.1:8302 > Accept: */* > < HTTP/1.1 200 OK < Date: Sat, 04 May 2024 08:53:48 GMT < Content-Type: text/plain; charset=utf-8 < Transfer-Encoding: chunked < { [data not shown] * Connection #0 to host 127.0.0.1 left intact + res=' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00273e990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} {CheckpointTs:449527893914812423 MinTableBarrierTs:449527893914812423 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4c34fd7e-7672-4395-849c-7eb15b1a8b90 {"id":"4c34fd7e-7672-4395-849c-7eb15b1a8b90","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812825} /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6077 4c34fd7e-7672-4395-849c-7eb15b1a8b90 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b 
{"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527893914812423,"min-table-barrier-ts":449527893914812423,"admin-job-type":0} /tidb/cdc/default/default/task/position/4c34fd7e-7672-4395-849c-7eb15b1a8b90/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'failed to get info:' + echo ' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST 
StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00273e990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} {CheckpointTs:449527893914812423 MinTableBarrierTs:449527893914812423 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4c34fd7e-7672-4395-849c-7eb15b1a8b90 {"id":"4c34fd7e-7672-4395-849c-7eb15b1a8b90","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812825} /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6077 4c34fd7e-7672-4395-849c-7eb15b1a8b90 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b 
{"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527893914812423,"min-table-barrier-ts":449527893914812423,"admin-job-type":0} /tidb/cdc/default/default/task/position/4c34fd7e-7672-4395-849c-7eb15b1a8b90/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + grep -q 'etcd info' + echo ' *** processors info ***: changefeedID: default/29d389b7-5285-4ea5-9180-cfb488a5692b {UpstreamID:7365064896431326437 Namespace:default ID:29d389b7-5285-4ea5-9180-cfb488a5692b SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-04 16:53:40.7872657 +0800 CST 
StartTs:449527890402082817 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00273e990 State:normal Error: Warning: CreatorVersion:v8.2.0-alpha-79-gc950cce3a Epoch:449527892079804423} {CheckpointTs:449527893914812423 MinTableBarrierTs:449527893914812423 AdminJobType:noop} *** etcd info ***: /tidb/cdc/default/__cdc_meta__/capture/4c34fd7e-7672-4395-849c-7eb15b1a8b90 {"id":"4c34fd7e-7672-4395-849c-7eb15b1a8b90","address":"127.0.0.1:8302","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812825} /tidb/cdc/default/__cdc_meta__/capture/a29501a2-63b4-42a6-840c-0587130c3111 {"id":"a29501a2-63b4-42a6-840c-0587130c3111","address":"127.0.0.1:8301","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812822} /tidb/cdc/default/__cdc_meta__/capture/b6472395-9764-47f0-8976-fcd884079c38 {"id":"b6472395-9764-47f0-8976-fcd884079c38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-79-gc950cce3a","git-hash":"c950cce3a9b105fd95bb2c788e1ab69ec32e0668","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714812817} /tidb/cdc/default/__cdc_meta__/meta/meta-version 1 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca5ff0 b6472395-9764-47f0-8976-fcd884079c38 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6045 a29501a2-63b4-42a6-840c-0587130c3111 /tidb/cdc/default/__cdc_meta__/owner/22318f42ceca6077 4c34fd7e-7672-4395-849c-7eb15b1a8b90 /tidb/cdc/default/default/changefeed/info/29d389b7-5285-4ea5-9180-cfb488a5692b 
{"upstream-id":7365064896431326437,"namespace":"default","changefeed-id":"29d389b7-5285-4ea5-9180-cfb488a5692b","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-29206?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-04T16:53:40.7872657+08:00","start-ts":449527890402082817,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-79-gc950cce3a","epoch":449527892079804423} /tidb/cdc/default/default/changefeed/status/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":449527893914812423,"min-table-barrier-ts":449527893914812423,"admin-job-type":0} /tidb/cdc/default/default/task/position/4c34fd7e-7672-4395-849c-7eb15b1a8b90/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/a29501a2-63b4-42a6-840c-0587130c3111/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/task/position/b6472395-9764-47f0-8976-fcd884079c38/29d389b7-5285-4ea5-9180-cfb488a5692b {"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null} /tidb/cdc/default/default/upstream/7365064896431326437 {"id":7365064896431326437,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}' + break + set +x table move_table.usertable exists go: downloading github.com/IBM/sarama v1.41.2 go: downloading github.com/xdg/scram v1.0.5 go: downloading github.com/benbjohnson/clock v1.3.5 go: downloading go.etcd.io/etcd/server/v3 v3.5.12 go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5 go: downloading github.com/tinylib/msgp v1.1.6 go: downloading 
github.com/KimMachineGun/automemlimit v0.2.4 go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98 go: downloading github.com/apache/pulsar-client-go v0.11.0 go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1 go: downloading gorm.io/gorm v1.24.5 go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424 go: downloading github.com/pierrec/lz4/v4 v4.1.18 go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17 go: downloading github.com/containerd/cgroups v1.0.4 go: downloading github.com/gin-gonic/gin v1.9.1 go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 go: downloading github.com/xdg/stringprep v1.0.3 go: downloading github.com/stretchr/objx v0.5.2 go: downloading github.com/philhofer/fwd v1.1.1 go: downloading github.com/jinzhu/now v1.1.5 go: downloading github.com/jinzhu/inflection v1.0.0 go: downloading github.com/cilium/ebpf v0.4.0 go: downloading github.com/sirupsen/logrus v1.9.3 go: downloading github.com/opencontainers/runtime-spec v1.0.2 go: downloading github.com/godbus/dbus/v5 v5.0.4 go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726 go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07 go: downloading github.com/gin-contrib/sse v0.1.0 go: downloading github.com/pelletier/go-toml/v2 v2.0.8 go: downloading github.com/ugorji/go/codec v1.2.11 go: downloading github.com/go-playground/validator/v10 v10.14.0 go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2 go: downloading github.com/aws/smithy-go v1.13.5 go: downloading github.com/eapache/go-resiliency v1.4.0 go: downloading github.com/eapache/queue v1.1.0 go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3 go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4 go: downloading github.com/hashicorp/go-multierror v1.1.1 go: downloading github.com/jcmturner/gofork v1.7.6 go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 go: downloading github.com/bits-and-blooms/bitset v1.4.0 go: downloading github.com/spaolacci/murmur3 v1.1.0 go: downloading github.com/go-playground/universal-translator v0.18.1 go: downloading github.com/gabriel-vasile/mimetype v1.4.2 go: downloading github.com/leodido/go-urn v1.2.4 go: downloading golang.org/x/mod v0.17.0 go: downloading github.com/pierrec/lz4 v2.6.1+incompatible go: downloading github.com/linkedin/goavro/v2 v2.11.1 go: downloading github.com/AthenZ/athenz v1.10.39 go: downloading github.com/99designs/keyring v1.2.1 go: downloading github.com/hashicorp/errwrap v1.0.0 go: downloading github.com/go-playground/locales v0.14.1 go: downloading github.com/dvsekhvalnov/jose2go v1.5.0 go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c go: downloading github.com/mtibben/percent v0.2.1 go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0 go: downloading github.com/soheilhy/cmux v0.1.5 go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75 go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12 go: downloading sigs.k8s.io/yaml v1.4.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0 go: downloading go.opentelemetry.io/otel/sdk v1.22.0 go: downloading go.etcd.io/etcd/raft/v3 v3.5.12 go: downloading go.etcd.io/bbolt v1.3.9 go: downloading github.com/jonboulle/clockwork v0.4.0 go: downloading 
github.com/golang-jwt/jwt/v4 v4.5.0 go: downloading go.etcd.io/etcd/client/v2 v2.305.12 go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510 go: downloading github.com/gorilla/websocket v1.5.1 go: downloading github.com/hashicorp/go-uuid v1.0.3 go: downloading github.com/jcmturner/aescts/v2 v2.0.0 go: downloading github.com/jcmturner/rpc/v2 v2.0.3 go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0 go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0 go: downloading go.opentelemetry.io/proto/otlp v1.1.0 go: downloading github.com/cenkalti/backoff/v4 v4.2.1 go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1 \033[0;36m<<< Run all test success >>>\033[0m [Pipeline] } Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1836/tiflow-cdc already exists) [Pipeline] // cache [Pipeline] } [Pipeline] // dir [Pipeline] } table mark.finish_mark_3 not exists for 21-th check, retry later [Pipeline] // withCredentials go: downloading github.com/ardielle/ardielle-go v1.5.2 [Pipeline] } [Pipeline] // timeout [Pipeline] } [Pipeline] // stage [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // node [Pipeline] } [Pipeline] // podTemplate [Pipeline] } [Pipeline] // withEnv [Pipeline] } [Pipeline] // stage [Pipeline] } {"level":"warn","ts":"2024-05-04T16:53:51.401421+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001230000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T16:53:51.402048+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000ec5500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""} {"level":"warn","ts":"2024-05-04T16:53:51.45387+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000efe1c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""} table mark.finish_mark_3 not exists for 22-th check, retry later start tidb cluster in /tmp/tidb_cdc_test/ddl_attributes Starting Upstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Starting Downstream PD... Release Version: v8.2.0-alpha-14-g1679dbca2 Edition: Community Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360 Git Branch: master UTC Build Time: 2024-04-30 08:09:12 Verifying upstream PD is started... 
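Two checks from the traces above are worth spelling out: the synced-status probe that the synced_status_with_redo case runs against the CDC HTTP API before killing pd-server (the etcd connection-refused retries in this chunk are the fallout of that kill), and the "Verifying upstream PD is started..." step the ddl_attributes case is performing here. A minimal sketch of both; the /pd/api/v1/version path is an assumption about PD's HTTP API, not taken from this log:

    # a) Synced-status probe from the synced_status_with_redo trace: query the v2 API,
    #    require synced == true, then kill this test's pd-server processes (fault injection).
    synced=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced | jq -r .synced)
    [ "$synced" = "true" ] || { echo "changefeed test-1 not synced yet"; exit 1; }
    ps aux | grep pd-server | grep /tmp/tidb_cdc_test/synced_status_with_redo | \
      awk '{print $2}' | xargs kill -9

    # b) PD readiness poll for the ddl_attributes cluster bring-up (endpoint path assumed).
    for i in $(seq 1 60); do
      curl -sf http://127.0.0.1:2379/pd/api/v1/version && break
      sleep 1
    done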
Aborted by Jenkins Admin
Sending interrupt signal to process
Killing processes
table mark.finish_mark_3 not exists for 23-th check, retry later
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/split_region/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_with_auth/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
table mark.finish_mark_3 not exists for 24-th check, retry later
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
{"level":"warn","ts":1714812835.8249433,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00205b500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
kill finished with exit code 0
  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
Sending interrupt signal to process
Killing processes
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
++ stop_tidb_cluster
script returned exit code 143
script returned exit code 143
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
script returned exit code 143
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // cache
[Pipeline] }
{"level":"warn","ts":1714812837.320184,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001768fc0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
Error: [CDC:ErrOwnerNotFound]owner not found
{"level":"warn","ts":1714812839.3201582,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001768fc0/127.0.0.1:2379","attempt":1,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
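The "++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced" trace above is the test script polling TiCDC's v2 open API for the synced status of changefeed test-1; because the job is being aborted at the same time, the call races with the teardown and eventually fails with [CDC:ErrOwnerNotFound]. A minimal Go sketch of the same request follows; the endpoint and changefeed ID come from the log, while decoding into a generic map is an assumption, since the response schema is not shown here.

package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Same endpoint the test script queries via curl.
	resp, err := http.Get("http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced")
	if err != nil {
		// e.g. connection refused once the cdc server has been killed.
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	// The exact response schema is not shown in this log, so decode generically.
	var payload map[string]any
	if err := json.Unmarshal(body, &payload); err != nil {
		fmt.Println("non-JSON response:", string(body))
		return
	}
	fmt.Println("synced status:", payload)
}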
script returned exit code 143
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
github.com/DataDog/zstd: gcc: signal: terminated
github.com/tikv/client-go/v2/txnkv/transaction: /usr/local/go/pkg/tool/linux_amd64/compile: signal: terminated
script returned exit code 143
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] // cache
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G09'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G10'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G12'
[Pipeline] // withEnv
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] // podTemplate
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G05'
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G04'
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G00'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 6ac42164-c4e7-4a88-bb77-464d309eac05
Failed in branch Matrix - TEST_GROUP = 'G07'
Finished: ABORTED