Console Output

Started by user Jenkins Admin
Obtained pipelines/pingcap/tiflow/latest/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git
Loading library tipipeline@main
Library tipipeline@main is cached. Copying from home.
[Pipeline] Start of Pipeline
[Pipeline] readJSON
[Pipeline] readTrusted
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-xp9bz-6kw2s
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "c94f5e65860e6d02dc0207c2eaa8eed14befb4ad"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-xp9bz"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] {
[Pipeline] stage
[Pipeline] { (Declarative: Checkout SCM)
[Pipeline] checkout
The recommended git tool is: git
No credentials specified
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git rev-list --no-walk 03312178c534dce949face80c69812d989e55009 # timeout=10
[Pipeline] }
[Pipeline] // stage
[Pipeline] withEnv
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 1 hr 5 min
[Pipeline] {
[Pipeline] stage
[Pipeline] { (Debug info)
[Pipeline] sh
+ printenv
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=035be2fe-66b5-4a9c-bd17-432458798b5c
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Debug info
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
_=/usr/bin/printenv
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
HUDSON_URL=https://do.pingcap.net/jenkins/
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=3
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-xp9bz
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-236df335481f9578f70eb859f68d5ceead3aa27f6c9385fda1ec4c08661c0305
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-xp9bz pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
+ echo -------------------------
-------------------------
+ go env
GO111MODULE=''
GOARCH='amd64'
GOBIN=''
GOCACHE='/home/jenkins/.cache/go-build'
GOENV='/home/jenkins/.config/go/env'
GOEXE=''
GOEXPERIMENT=''
GOFLAGS=''
GOHOSTARCH='amd64'
GOHOSTOS='linux'
GOINSECURE=''
GOMODCACHE='/go/pkg/mod'
GONOPROXY=''
GONOSUMDB=''
GOOS='linux'
GOPATH='/go'
GOPRIVATE=''
GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct'
GOROOT='/usr/local/go'
GOSUMDB='sum.golang.org'
GOTMPDIR=''
GOTOOLCHAIN='auto'
GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64'
GOVCS=''
GOVERSION='go1.21.0'
GCCGO='gccgo'
GOAMD64='v1'
AR='ar'
CC='gcc'
CXX='g++'
CGO_ENABLED='1'
GOMOD='/dev/null'
GOWORK=''
CGO_CFLAGS='-O2 -g'
CGO_CPPFLAGS=''
CGO_CXXFLAGS='-O2 -g'
CGO_FFLAGS='-O2 -g'
CGO_LDFLAGS='-O2 -g'
PKG_CONFIG='pkg-config'
GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build3095919039=/tmp/go-build -gno-record-gcc-switches'
+ echo -------------------------
-------------------------
+ echo 'debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73 bash'
debug command: kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73 bash
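Note: the echoed debug command attaches to whichever container kubectl picks by default (the first one in the pod spec unless a default-container annotation is set). To land in a specific container from the spec above, such as golang or kafka, pass -c and the -- separator expected by current kubectl; an assumed variant:

  kubectl -n jenkins-tiflow exec -ti pingcap-tiflow-pull-cdc-integration-kafka-test-1855-xp9bz-q5z73 -c golang -- bash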
[Pipeline] container
[Pipeline] {
[Pipeline] sh
+ dig github.com

; <<>> DiG 9.18.16 <<>> github.com
;; global options: +cmd
;; Got answer:
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 54325
;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1

;; OPT PSEUDOSECTION:
; EDNS: version: 0, flags:; udp: 1232
; COOKIE: eecb3338ac1081aa (echoed)
;; QUESTION SECTION:
;github.com.			IN	A

;; ANSWER SECTION:
github.com.		18	IN	A	20.205.243.166

;; Query time: 0 msec
;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP)
;; WHEN: Sun May 05 03:15:01 UTC 2024
;; MSG SIZE  rcvd: 77

[Pipeline] script
[Pipeline] {
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Check diff files)
[Pipeline] container
[Pipeline] {
[Pipeline] script
[Pipeline] {
[Pipeline] withCredentials
Masking supported pattern matches of $token
[Pipeline] {
[Pipeline] httpRequest
Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure.
		 Affected argument(s) used the following variable(s): [token]
		 See https://jenkins.io/redirect/groovy-string-interpolation for details.
HttpMethod: GET
URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100
Content-Type: application/json
Authorization: *****
Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=1&per_page=100
Response Code: HTTP/1.1 200 OK
Success: Status code 200 is in the accepted range: 100:399
[Pipeline] httpRequest
Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure.
		 Affected argument(s) used the following variable(s): [token]
		 See https://jenkins.io/redirect/groovy-string-interpolation for details.
HttpMethod: GET
URL: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100
Content-Type: application/json
Authorization: *****
Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10919/files?page=2&per_page=100
Response Code: HTTP/1.1 200 OK
Success: Status code 200 is in the accepted range: 100:399
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] echo
pr_diff_files: [cdc/model/kv.go, cdc/model/sink.go, cdc/model/sink_test.go, cdc/processor/processor.go, cdc/processor/sinkmanager/manager.go, cdc/processor/sourcemanager/manager.go, cdc/redo/reader/reader.go, cdc/sink/dmlsink/factory/factory.go, cdc/sink/dmlsink/txn/mysql/mysql.go, cdc/sink/dmlsink/txn/mysql/mysql_test.go, cmd/kafka-consumer/main.go, cmd/pulsar-consumer/main.go, cmd/storage-consumer/main.go, errors.toml, pkg/applier/redo.go, pkg/applier/redo_test.go, pkg/errors/cdc_errors.go, pkg/errors/helper.go, pkg/sink/codec/open/open_protocol_decoder.go, tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml, tests/integration_tests/changefeed_dup_error_restart/conf/workload, tests/integration_tests/changefeed_dup_error_restart/run.sh, tests/integration_tests/force_replicate_table/run.sh, tests/integration_tests/run_group.sh]
[Pipeline] echo
diff file not matched: cdc/model/kv.go
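The two httpRequest calls above page through the PR's changed files, and the stage then appears to test each path against a set of patterns; cdc/model/kv.go is reported as not matching, so the run proceeds to the full test matrix. A minimal bash sketch of that kind of filter (the pattern list is invented for illustration; the real matching lives in the pipeline's Groovy code):

  #!/usr/bin/env bash
  # Hypothetical changed-file filter; the patterns below are assumptions, not the pipeline's real list.
  patterns=('tests/integration_tests/*' 'docs/*' '*.md')
  files=('cdc/model/kv.go' 'cdc/model/sink.go' 'tests/integration_tests/run_group.sh')
  for f in "${files[@]}"; do
    matched=false
    for p in "${patterns[@]}"; do
      case "$f" in ($p) matched=true ;; esac
    done
    # Any file outside the skip patterns means the full test matrix must run.
    [ "$matched" = false ] && { echo "diff file not matched: $f"; break; }
  done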
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Checkout)
[Pipeline] timeout
Timeout set to expire in 10 min
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
Cache restored successfully (git/pingcap/tiflow/rev-be15534-0de8dc3)
203830272 bytes in 0.89 secs (229199322 bytes/sec)
[Pipeline] {
[Pipeline] retry
[Pipeline] {
[Pipeline] script
[Pipeline] {
[Pipeline] sh
git version 2.36.6
Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/.git/
.git
HEAD is now at 0de8dc3e4 fix test again
POST git-upload-pack (656 bytes)
From https://github.com/pingcap/tiflow
 = [up to date]          master               -> origin/master
 = [up to date]          refs/pull/10919/head -> origin/pr/10919/head
Previous HEAD position was 0de8dc3e4 fix test again
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
🚧 Checking out base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224...
HEAD is now at be1553484 codec(ticdc): avro simplify the unit test (#11010)
✅ Checked. 🎉
🧾 HEAD info:
be1553484fe4c03594eabb8d7435c694e5fd7224
be1553484 codec(ticdc): avro simplify the unit test (#11010)
2a7a65c6f Support Sequences (#10203)
36e9e1bf6 cli(ticdc): allow client authentication to be enabled without tls (#11005)
🚧 Pre-merging pull request heads onto base SHA: be1553484fe4c03594eabb8d7435c694e5fd7224 ...
Updating be1553484..0de8dc3e4
Fast-forward
 cdc/model/kv.go                                    |   5 +
 cdc/model/sink.go                                  |  35 ++-
 cdc/model/sink_test.go                             |   9 +-
 cdc/processor/processor.go                         |  21 +-
 cdc/processor/sinkmanager/manager.go               |   5 +
 cdc/processor/sourcemanager/manager.go             |  66 +++-
 cdc/redo/reader/reader.go                          |  21 +-
 cdc/sink/dmlsink/factory/factory.go                |   8 +-
 cdc/sink/dmlsink/txn/mysql/mysql.go                |  89 +++---
 cdc/sink/dmlsink/txn/mysql/mysql_test.go           |   2 +-
 cmd/kafka-consumer/main.go                         |   4 +-
 cmd/pulsar-consumer/main.go                        |  17 +-
 cmd/storage-consumer/main.go                       |   4 +-
 errors.toml                                        |   5 +
 pkg/applier/redo.go                                | 303 +++++++++++++++++-
 pkg/applier/redo_test.go                           | 347 ++++++++++++++++++++-
 pkg/errors/cdc_errors.go                           |   4 +
 pkg/errors/helper.go                               |  19 ++
 pkg/sink/codec/open/open_protocol_decoder.go       |   2 +
 .../conf/diff_config.toml                          |  29 ++
 .../changefeed_dup_error_restart/conf/workload     |  13 +
 .../changefeed_dup_error_restart/run.sh            |  54 ++++
 .../integration_tests/force_replicate_table/run.sh |   4 +-
 tests/integration_tests/run_group.sh               |   2 +-
 24 files changed, 970 insertions(+), 98 deletions(-)
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/diff_config.toml
 create mode 100644 tests/integration_tests/changefeed_dup_error_restart/conf/workload
 create mode 100755 tests/integration_tests/changefeed_dup_error_restart/run.sh
🧾 Pre-merged result:
0de8dc3e43ec741eba58047155ce7f3dba8eb4f7
0de8dc3e4 fix test again
6a342866d fix bit test
0dd104704 fix
✅ Pre merged 🎉
✅ ~~~~~All done.~~~~~~
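The checkout block above pins the working tree to the base SHA and then fast-forwards it to the PR head. The same flow with plain git commands (the exact commands are assumed; only their output appears in the log):

  # Pin to the base SHA, then fast-forward to the already-fetched PR head.
  git checkout -f be1553484fe4c03594eabb8d7435c694e5fd7224
  git log --oneline -3        # the "HEAD info" lines above
  git merge --ff-only 0de8dc3e43ec741eba58047155ce7f3dba8eb4f7
  git log --oneline -3        # the "Pre-merged result" lines above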
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // retry
[Pipeline] }
Cache not saved (git/pingcap/tiflow/rev-be15534-0de8dc3 already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (prepare)
[Pipeline] timeout
Timeout set to expire in 20 min
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download
[Pipeline] {
[Pipeline] retry
[Pipeline] {
[Pipeline] sh
+ cd ../tiflow
+ ./scripts/download-integration-test-binaries.sh master
Download binaries...
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    41  100    41    0     0    160      0 --:--:-- --:--:-- --:--:--   160
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    41  100    41    0     0    779      0 --:--:-- --:--:-- --:--:--   788
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    41  100    41    0     0   2603      0 --:--:-- --:--:-- --:--:--  2733
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    41  100    41    0     0    552      0 --:--:-- --:--:-- --:--:--   554
>>>
download tidb-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz
2024-05-05 11:15:26 URL:http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz [536570515/536570515] -> "tmp/tidb-server.tar.gz" [1]
>>>
download pd-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz
2024-05-05 11:15:44 URL:http://fileserver.pingcap.net/download/builds/pingcap/pd/1679dbca25b3483d1375c7e747da27e99ad77360/centos7/pd-server.tar.gz [187372022/187372022] -> "tmp/pd-server.tar.gz" [1]
>>>
download tikv-server.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz
2024-05-05 11:16:26 URL:http://fileserver.pingcap.net/download/builds/pingcap/tikv/72a0fd5b00235a7c56014b77ddd933e2a0d33c88/centos7/tikv-server.tar.gz [919098782/919098782] -> "tmp/tikv-server.tar.gz" [1]
>>>
download tiflash.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz
2024-05-05 11:16:54 URL:http://fileserver.pingcap.net/download/builds/pingcap/tiflash/master/8e170090fad91c94bef8d908e21c195c1d145b02/centos7/tiflash.tar.gz [456057803/456057803] -> "tmp/tiflash.tar.gz" [1]
>>>
download minio.tar.gz from http://fileserver.pingcap.net/download/minio.tar.gz
2024-05-05 11:16:59 URL:http://fileserver.pingcap.net/download/minio.tar.gz [17718777/17718777] -> "tmp/minio.tar.gz" [1]
>>>
download go-ycsb from http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb
2024-05-05 11:17:02 URL:http://fileserver.pingcap.net/download/builds/pingcap/go-ycsb/test-br/go-ycsb [45975512/45975512] -> "third_bin/go-ycsb" [1]
>>>
download jq from http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64
2024-05-05 11:17:03 URL:http://fileserver.pingcap.net/download/builds/pingcap/test/jq-1.6/jq-linux64 [3953824/3953824] -> "third_bin/jq" [1]
>>>
download etcd.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz
2024-05-05 11:17:04 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/etcd-v3.4.7-linux-amd64.tar.gz [17310840/17310840] -> "tmp/etcd.tar.gz" [1]
>>>
download sync_diff_inspector.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz
2024-05-05 11:17:08 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/sync_diff_inspector_hash-d671b084_linux-amd64.tar.gz [79877126/79877126] -> "tmp/sync_diff_inspector.tar.gz" [1]
>>>
download schema-registry.tar.gz from http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz
2024-05-05 11:17:25 URL:http://fileserver.pingcap.net/download/builds/pingcap/cdc/schema-registry.tar.gz [278386006/278386006] -> "tmp/schema-registry.tar.gz" [1]
Download SUCCESS
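Each third-party component is fetched as a tarball from the file server and unpacked so its binaries end up under ./bin, as the listing below shows. A minimal sketch of that download-and-extract step for one component (the unpack layout is assumed; only the downloads themselves are logged):

  mkdir -p tmp bin
  url=http://fileserver.pingcap.net/download/builds/pingcap/tidb/600b2ed4bf0aa38224a1c4c4c68831820735515c/centos7/tidb-server.tar.gz
  wget -nv -O tmp/tidb-server.tar.gz "$url"   # produces the 'URL: ... -> "tmp/tidb-server.tar.gz"' line above
  tar -xzf tmp/tidb-server.tar.gz -C bin      # assumed: unpack into ./bin so tidb-server appears in the listing
  ./bin/tidb-server -V                        # version check, as run later in this stage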
+ ls -alh ./bin
total 1.9G
drwxr-sr-x  6 jenkins jenkins  4.0K May  5 11:17 .
drwxr-sr-x 19 jenkins jenkins  4.0K May  5 11:17 ..
drwxr-sr-x  2 jenkins jenkins  4.0K May 19  2023 bin
drwxr-sr-x  4 jenkins jenkins  4.0K May 10  2023 etc
-rwxr-xr-x  1 jenkins jenkins   17M Apr  2  2020 etcdctl
-rwxr-xr-x  1 jenkins jenkins   44M May  5 11:17 go-ycsb
-rwxr-xr-x  1 jenkins jenkins  3.8M May  5 11:17 jq
drwxr-sr-x  3 jenkins jenkins  4.0K May 10  2023 lib
lrwxrwxrwx  1 jenkins jenkins    13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0
-rwxr-xr-x  1 jenkins jenkins 1016K Nov  7 01:00 libc++.so.1.0
lrwxrwxrwx  1 jenkins jenkins    16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0
-rwxr-xr-x  1 jenkins jenkins  358K Nov  7 01:00 libc++abi.so.1.0
lrwxrwxrwx  1 jenkins jenkins    13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3
lrwxrwxrwx  1 jenkins jenkins    15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0
-rwxr-xr-x  1 jenkins jenkins  2.6M Apr 30 10:34 libgmssl.so.3.0
-rwxr-xr-x  1 jenkins jenkins  272M Apr 30 11:16 libtiflash_proxy.so
-rwxr-xr-x  1 jenkins jenkins   50M Jul 29  2020 minio
-rwxr-xr-x  1 jenkins jenkins   37M Apr 30 16:11 pd-api-bench
-rwxr-xr-x  1 jenkins jenkins   44M Apr 30 16:10 pd-ctl
-rwxr-xr-x  1 jenkins jenkins   36M Apr 30 16:10 pd-heartbeat-bench
-rwxr-xr-x  1 jenkins jenkins   32M Apr 30 16:10 pd-recover
-rwxr-xr-x  1 jenkins jenkins  106M Apr 30 16:10 pd-server
-rwxr-xr-x  1 jenkins jenkins   26M Apr 30 16:10 pd-tso-bench
-rwxr-xr-x  1 jenkins jenkins  3.0M Apr 30 16:11 pd-ut
-rwxr-xr-x  1 jenkins jenkins   32M Apr 30 16:10 regions-dump
drwxr-sr-x  4 jenkins jenkins  4.0K May 10  2023 share
-rwxr-xr-x  1 jenkins jenkins   32M Apr 30 16:11 stores-dump
-rwxr-xr-x  1 jenkins jenkins  192M Sep 22  2023 sync_diff_inspector
-rwxr-xr-x  1 jenkins jenkins  208M May  1 10:57 tidb-server
-rwxr-xr-x  1 jenkins jenkins  380M Apr 30 11:15 tiflash
-rwxr-xr-x  1 jenkins jenkins  418M Apr 30 11:29 tikv-server
-rwxr-xr-x  1 jenkins jenkins  2.0M Apr 30 16:11 xprog
+ make check_third_party_binary
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tidb-server
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tikv-server
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-server
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/tiflash
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/pd-ctl
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/sync_diff_inspector
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/go-ycsb
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/etcdctl
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/jq
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/minio
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/bin/schema-registry-start
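The check_third_party_binary target resolves each required tool to an absolute path, as printed above. The Makefile target itself is not shown in the log; an assumed bash equivalent of the check:

  # Assumed equivalent: fail if any required binary is missing or not executable, otherwise print its path.
  for b in tidb-server tikv-server pd-server tiflash pd-ctl sync_diff_inspector go-ycsb etcdctl jq minio bin/schema-registry-start; do
    test -x "bin/$b" || { echo "missing bin/$b" >&2; exit 1; }
    readlink -f "bin/$b"
  done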
+ cd -
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/third_party_download
+ mkdir -p bin
+ mv ../tiflow/bin/bin ../tiflow/bin/etc ../tiflow/bin/etcdctl ../tiflow/bin/go-ycsb ../tiflow/bin/jq ../tiflow/bin/lib ../tiflow/bin/libc++.so.1 ../tiflow/bin/libc++.so.1.0 ../tiflow/bin/libc++abi.so.1 ../tiflow/bin/libc++abi.so.1.0 ../tiflow/bin/libgmssl.so ../tiflow/bin/libgmssl.so.3 ../tiflow/bin/libgmssl.so.3.0 ../tiflow/bin/libtiflash_proxy.so ../tiflow/bin/minio ../tiflow/bin/pd-api-bench ../tiflow/bin/pd-ctl ../tiflow/bin/pd-heartbeat-bench ../tiflow/bin/pd-recover ../tiflow/bin/pd-server ../tiflow/bin/pd-tso-bench ../tiflow/bin/pd-ut ../tiflow/bin/regions-dump ../tiflow/bin/share ../tiflow/bin/stores-dump ../tiflow/bin/sync_diff_inspector ../tiflow/bin/tidb-server ../tiflow/bin/tiflash ../tiflow/bin/tikv-server ../tiflow/bin/xprog ./bin/
+ ls -alh ./bin
total 1.9G
drwxr-sr-x 6 jenkins jenkins  4.0K May  5 11:17 .
drwxr-sr-x 3 jenkins jenkins  4.0K May  5 11:17 ..
drwxr-sr-x 2 jenkins jenkins  4.0K May 19  2023 bin
drwxr-sr-x 4 jenkins jenkins  4.0K May 10  2023 etc
-rwxr-xr-x 1 jenkins jenkins   17M Apr  2  2020 etcdctl
-rwxr-xr-x 1 jenkins jenkins   44M May  5 11:17 go-ycsb
-rwxr-xr-x 1 jenkins jenkins  3.8M May  5 11:17 jq
drwxr-sr-x 3 jenkins jenkins  4.0K May 10  2023 lib
lrwxrwxrwx 1 jenkins jenkins    13 Apr 30 11:15 libc++.so.1 -> libc++.so.1.0
-rwxr-xr-x 1 jenkins jenkins 1016K Nov  7 01:00 libc++.so.1.0
lrwxrwxrwx 1 jenkins jenkins    16 Apr 30 11:15 libc++abi.so.1 -> libc++abi.so.1.0
-rwxr-xr-x 1 jenkins jenkins  358K Nov  7 01:00 libc++abi.so.1.0
lrwxrwxrwx 1 jenkins jenkins    13 Apr 30 11:15 libgmssl.so -> libgmssl.so.3
lrwxrwxrwx 1 jenkins jenkins    15 Apr 30 11:15 libgmssl.so.3 -> libgmssl.so.3.0
-rwxr-xr-x 1 jenkins jenkins  2.6M Apr 30 10:34 libgmssl.so.3.0
-rwxr-xr-x 1 jenkins jenkins  272M Apr 30 11:16 libtiflash_proxy.so
-rwxr-xr-x 1 jenkins jenkins   50M Jul 29  2020 minio
-rwxr-xr-x 1 jenkins jenkins   37M Apr 30 16:11 pd-api-bench
-rwxr-xr-x 1 jenkins jenkins   44M Apr 30 16:10 pd-ctl
-rwxr-xr-x 1 jenkins jenkins   36M Apr 30 16:10 pd-heartbeat-bench
-rwxr-xr-x 1 jenkins jenkins   32M Apr 30 16:10 pd-recover
-rwxr-xr-x 1 jenkins jenkins  106M Apr 30 16:10 pd-server
-rwxr-xr-x 1 jenkins jenkins   26M Apr 30 16:10 pd-tso-bench
-rwxr-xr-x 1 jenkins jenkins  3.0M Apr 30 16:11 pd-ut
-rwxr-xr-x 1 jenkins jenkins   32M Apr 30 16:10 regions-dump
drwxr-sr-x 4 jenkins jenkins  4.0K May 10  2023 share
-rwxr-xr-x 1 jenkins jenkins   32M Apr 30 16:11 stores-dump
-rwxr-xr-x 1 jenkins jenkins  192M Sep 22  2023 sync_diff_inspector
-rwxr-xr-x 1 jenkins jenkins  208M May  1 10:57 tidb-server
-rwxr-xr-x 1 jenkins jenkins  380M Apr 30 11:15 tiflash
-rwxr-xr-x 1 jenkins jenkins  418M Apr 30 11:29 tikv-server
-rwxr-xr-x 1 jenkins jenkins  2.0M Apr 30 16:11 xprog
+ ./bin/tidb-server -V
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
+ ./bin/pd-server -V
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
+ ./bin/tikv-server -V
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ ./bin/tiflash --version
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
+ ./bin/sync_diff_inspector --version
App Name: sync_diff_inspector v2.0
Release Version: v7.4.0
Git Commit Hash: d671b0840063bc2532941f02e02e12627402844c
Git Branch: heads/refs/tags/v7.4.0
UTC Build Time: 2023-09-22 03:51:56
Go Version: go1.21.1
[Pipeline] }
[Pipeline] // retry
[Pipeline] }
[Pipeline] // dir
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
Cache restored successfully (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-0de8dc3)
1191700480 bytes in 3.02 secs (394530886 bytes/sec)
[Pipeline] {
[Pipeline] sh
+ ls -alh ./bin
total 1.2G
drwxr-sr-x  2 jenkins jenkins 4.0K May  5 11:17 .
drwxr-sr-x 19 jenkins jenkins 4.0K May  5 11:17 ..
-rwxr-xr-x  1 jenkins jenkins 220M May  4 22:57 cdc
-rwxr-xr-x  1 jenkins jenkins 359M May  4 22:57 cdc.test
-rwxr-xr-x  1 jenkins jenkins 183M May  4 22:53 cdc_kafka_consumer
-rwxr-xr-x  1 jenkins jenkins 183M May  4 22:53 cdc_pulsar_consumer
-rwxr-xr-x  1 jenkins jenkins 182M May  4 22:52 cdc_storage_consumer
-rwxr-xr-x  1 jenkins jenkins  12M May  4 22:53 oauth2-server
+ '[' -f ./bin/cdc ']'
+ '[' -f ./bin/cdc_kafka_consumer ']'
+ '[' -f ./bin/cdc_storage_consumer ']'
+ '[' -f ./bin/cdc.test ']'
+ ls -alh ./bin
total 1.2G
drwxr-sr-x  2 jenkins jenkins 4.0K May  5 11:17 .
drwxr-sr-x 19 jenkins jenkins 4.0K May  5 11:17 ..
-rwxr-xr-x  1 jenkins jenkins 220M May  4 22:57 cdc
-rwxr-xr-x  1 jenkins jenkins 359M May  4 22:57 cdc.test
-rwxr-xr-x  1 jenkins jenkins 183M May  4 22:53 cdc_kafka_consumer
-rwxr-xr-x  1 jenkins jenkins 183M May  4 22:53 cdc_pulsar_consumer
-rwxr-xr-x  1 jenkins jenkins 182M May  4 22:52 cdc_storage_consumer
-rwxr-xr-x  1 jenkins jenkins  12M May  4 22:53 oauth2-server
+ ./bin/cdc version
Release Version: v8.2.0-alpha-53-g0de8dc3e4
Git Commit Hash: 0de8dc3e43ec741eba58047155ce7f3dba8eb4f7
Git Branch: HEAD
UTC Build Time: 2024-05-04 14:52:44
Go Version: go version go1.21.0 linux/amd64
Failpoint Build: true
[Pipeline] }
Cache not saved (binary/pingcap/tiflow/cdc-integration-test/rev-be15534-0de8dc3 already exists)
[Pipeline] // cache
[Pipeline] cache
Cache not restored (no such key found)
[Pipeline] {
[Pipeline] sh
+ cp -r ../third_party_download/bin/bin ../third_party_download/bin/etc ../third_party_download/bin/etcdctl ../third_party_download/bin/go-ycsb ../third_party_download/bin/jq ../third_party_download/bin/lib ../third_party_download/bin/libc++.so.1 ../third_party_download/bin/libc++.so.1.0 ../third_party_download/bin/libc++abi.so.1 ../third_party_download/bin/libc++abi.so.1.0 ../third_party_download/bin/libgmssl.so ../third_party_download/bin/libgmssl.so.3 ../third_party_download/bin/libgmssl.so.3.0 ../third_party_download/bin/libtiflash_proxy.so ../third_party_download/bin/minio ../third_party_download/bin/pd-api-bench ../third_party_download/bin/pd-ctl ../third_party_download/bin/pd-heartbeat-bench ../third_party_download/bin/pd-recover ../third_party_download/bin/pd-server ../third_party_download/bin/pd-tso-bench ../third_party_download/bin/pd-ut ../third_party_download/bin/regions-dump ../third_party_download/bin/share ../third_party_download/bin/stores-dump ../third_party_download/bin/sync_diff_inspector ../third_party_download/bin/tidb-server ../third_party_download/bin/tiflash ../third_party_download/bin/tikv-server ../third_party_download/bin/xprog ./bin/
+ ls -alh ./bin
total 3.0G
drwxr-sr-x  6 jenkins jenkins  4.0K May  5 11:17 .
drwxr-sr-x 19 jenkins jenkins  4.0K May  5 11:17 ..
drwxr-sr-x  2 jenkins jenkins  4.0K May  5 11:17 bin
-rwxr-xr-x  1 jenkins jenkins  220M May  4 22:57 cdc
-rwxr-xr-x  1 jenkins jenkins  359M May  4 22:57 cdc.test
-rwxr-xr-x  1 jenkins jenkins  183M May  4 22:53 cdc_kafka_consumer
-rwxr-xr-x  1 jenkins jenkins  183M May  4 22:53 cdc_pulsar_consumer
-rwxr-xr-x  1 jenkins jenkins  182M May  4 22:52 cdc_storage_consumer
drwxr-sr-x  4 jenkins jenkins  4.0K May  5 11:17 etc
-rwxr-xr-x  1 jenkins jenkins   17M May  5 11:17 etcdctl
-rwxr-xr-x  1 jenkins jenkins   44M May  5 11:17 go-ycsb
-rwxr-xr-x  1 jenkins jenkins  3.8M May  5 11:17 jq
drwxr-sr-x  3 jenkins jenkins  4.0K May  5 11:17 lib
lrwxrwxrwx  1 jenkins jenkins    13 May  5 11:17 libc++.so.1 -> libc++.so.1.0
-rwxr-xr-x  1 jenkins jenkins 1016K May  5 11:17 libc++.so.1.0
lrwxrwxrwx  1 jenkins jenkins    16 May  5 11:17 libc++abi.so.1 -> libc++abi.so.1.0
-rwxr-xr-x  1 jenkins jenkins  358K May  5 11:17 libc++abi.so.1.0
lrwxrwxrwx  1 jenkins jenkins    13 May  5 11:17 libgmssl.so -> libgmssl.so.3
lrwxrwxrwx  1 jenkins jenkins    15 May  5 11:17 libgmssl.so.3 -> libgmssl.so.3.0
-rwxr-xr-x  1 jenkins jenkins  2.6M May  5 11:17 libgmssl.so.3.0
-rwxr-xr-x  1 jenkins jenkins  272M May  5 11:17 libtiflash_proxy.so
-rwxr-xr-x  1 jenkins jenkins   50M May  5 11:17 minio
-rwxr-xr-x  1 jenkins jenkins   12M May  4 22:53 oauth2-server
-rwxr-xr-x  1 jenkins jenkins   37M May  5 11:17 pd-api-bench
-rwxr-xr-x  1 jenkins jenkins   44M May  5 11:17 pd-ctl
-rwxr-xr-x  1 jenkins jenkins   36M May  5 11:17 pd-heartbeat-bench
-rwxr-xr-x  1 jenkins jenkins   32M May  5 11:17 pd-recover
-rwxr-xr-x  1 jenkins jenkins  106M May  5 11:17 pd-server
-rwxr-xr-x  1 jenkins jenkins   26M May  5 11:17 pd-tso-bench
-rwxr-xr-x  1 jenkins jenkins  3.0M May  5 11:17 pd-ut
-rwxr-xr-x  1 jenkins jenkins   32M May  5 11:17 regions-dump
drwxr-sr-x  4 jenkins jenkins  4.0K May  5 11:17 share
-rwxr-xr-x  1 jenkins jenkins   32M May  5 11:17 stores-dump
-rwxr-xr-x  1 jenkins jenkins  192M May  5 11:17 sync_diff_inspector
-rwxr-xr-x  1 jenkins jenkins  208M May  5 11:17 tidb-server
-rwxr-xr-x  1 jenkins jenkins  380M May  5 11:17 tiflash
-rwxr-xr-x  1 jenkins jenkins  418M May  5 11:17 tikv-server
-rwxr-xr-x  1 jenkins jenkins  2.0M May  5 11:17 xprog
[Pipeline] }
Cache saved successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 75.42 secs (49372535 bytes/sec)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Tests)
[Pipeline] parallel
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G00')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G01')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G02')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G03')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G04')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G05')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G06')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G07')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G08')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G09')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G10')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G11')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G12')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G13')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G14')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G15')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G16')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G17')
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
[Pipeline] readTrusted
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pq5wq-7s0v9
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "5b79940d1ce0dcc716ecc3c51e751c8c1d29f6c2"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pq5wq"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] {
[Pipeline] checkout
The recommended git tool is: git
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@6667dc1b; decorates RemoteLauncher[hudson.remoting.Channel@75d10a4d:JNLP4-connect connection from 10.233.67.79/10.233.67.79:46106] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
[Pipeline] {
Cloning repository https://github.com/PingCAP-QE/ci.git
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr
[Pipeline] node
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wqs8r-3gkqm
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "a507333d3e67c06b844008eb0452e52d0b48c10e"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wqs8r"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"
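Note: the kafka container's RACK_COMMAND above only downloads the test keystores into /tmp before the broker starts. A minimal sketch of doing the same fetch and a sanity check by hand, assuming the pinned tiflow certificates are still available at those URLs and using the test password from the env block:
curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks -o /tmp/kafka.server.keystore.jks
curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks -o /tmp/kafka.server.truststore.jks
# list the keystore entries to confirm the JKS file is intact (password from KAFKA_SSL_KEYSTORE_PASSWORD)
keytool -list -keystore /tmp/kafka.server.keystore.jks -storepass test1234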

 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f
[Pipeline] node
[Pipeline] {
[Pipeline] checkout
The recommended git tool is: git
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pfbpq-1d51d
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "840bbae129246e4684891c92b969b73d7c6de8f1"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pfbpq"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"
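Note: the kafka container above pre-creates the big-message-test topic (KAFKA_CREATE_TOPICS=big-message-test:1:1) and exposes a PLAINTEXT listener on 9092 and an SSL listener on 9093. A rough way to verify both from inside the container, assuming the stock Kafka CLI tools ship under /opt/kafka/bin in the wurstmeister/kafka image:
# describe the topic created from KAFKA_CREATE_TOPICS (name:partitions:replicas)
/opt/kafka/bin/kafka-topics.sh --bootstrap-server 127.0.0.1:9092 --describe --topic big-message-test
# check that the SSL listener on 9093 presents the test certificate
openssl s_client -connect 127.0.0.1:9093 </dev/null 2>/dev/null | openssl x509 -noout -subject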

[Pipeline] {
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3
[Pipeline] checkout
The recommended git tool is: git
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-6skrl-llt65
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "cbd5e06ea7604c164a032eeccd36d072b1ffed79"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-6skrl"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] {
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@44663ede; decorates RemoteLauncher[hudson.remoting.Channel@494524b:JNLP4-connect connection from 10.233.108.126/10.233.108.126:34956] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
[Pipeline] checkout
The recommended git tool is: git
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-2hg1d-sd8s8
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "47566075d204f78e1a6f0f6a3dbbe71f4961c976"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-2hg1d"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] {
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3cmhg-fhf34
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "f9c68eb3f3281d45256be9020ca2466860de5cb9"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3cmhg"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"
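Note: each pod document above corresponds to one "Created Pod:" line in the jenkins-tiflow namespace. A quick sketch for inspecting one of them with kubectl, assuming access to the same cluster (the pod name is taken from the log above):
kubectl -n jenkins-tiflow get pod pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m -o wide
# print per-container resource requests/limits as declared in the template
kubectl -n jenkins-tiflow get pod pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m -o jsonpath='{range .spec.containers[*]}{.name}{"\t"}{.resources}{"\n"}{end}'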

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
[Pipeline] checkout
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
[Pipeline] {
The recommended git tool is: git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
[Pipeline] withEnv
[Pipeline] {
[Pipeline] checkout
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0
[Pipeline] container
[Pipeline] {
The recommended git tool is: git
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] withCredentials
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
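Note: every agent repeats the same shallow clone of PingCAP-QE/ci pinned to revision 03312178c534dce949face80c69812d989e55009. A minimal sketch of reproducing that checkout locally with the same commands the log shows:
git init ci && cd ci
git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git '+refs/heads/*:refs/remotes/origin/*'
git checkout -f 03312178c534dce949face80c69812d989e55009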
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@64852838; decorates RemoteLauncher[hudson.remoting.Channel@4e1f7062:JNLP4-connect connection from 10.233.73.139/10.233.73.139:49510] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
[Pipeline] cache
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@283230f8; decorates RemoteLauncher[hudson.remoting.Channel@262615d1:JNLP4-connect connection from 10.233.70.77/10.233.70.77:56062] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@506b6c06; decorates RemoteLauncher[hudson.remoting.Channel@78ceb987:JNLP4-connect connection from 10.233.105.54/10.233.105.54:40166] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@574d7bb2; decorates RemoteLauncher[hudson.remoting.Channel@2d12fbac:JNLP4-connect connection from 10.233.84.116/10.233.84.116:35810] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pvtgm-s9g1j
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "c142a591d561620740da3b2da0cb723f2fc77367"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pvtgm"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"
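Note: the connect container above is a stock quay.io/debezium/connect:2.4 worker pointed at the local broker, with its storage topics taken from the env block. As an illustration only (the connector name, topic prefix, and the worker's default REST port 8083 are assumptions, not taken from this log), registering a MySQL source connector against the mysql container would look roughly like:
# user/password and port come from the mysql container env above; a real connector needs a user with replication privileges
curl -s -X POST http://127.0.0.1:8083/connectors \
  -H 'Content-Type: application/json' \
  -d '{
    "name": "example-mysql-connector",
    "config": {
      "connector.class": "io.debezium.connector.mysql.MySqlConnector",
      "database.hostname": "127.0.0.1",
      "database.port": "3310",
      "database.user": "mysqluser",
      "database.password": "mysqlpw",
      "database.server.id": "1",
      "topic.prefix": "example",
      "schema.history.internal.kafka.bootstrap.servers": "127.0.0.1:9092",
      "schema.history.internal.kafka.topic": "schema-changes.example"
    }
  }'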

Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Avoid second fetch
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-1nvp9-398nz
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "50949ef1c5dd89388ab9c98e09582de7051b4c34"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-1nvp9"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Obtained pipelines/pingcap/tiflow/latest/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-703xz-g4c4j
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "b453acef149ad9b28d2c0b0e2cadbd27dff678c6"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-703xz"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
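The kafka container in the pod spec above exposes a PLAINTEXT listener on 127.0.0.1:9092 and an SSL listener on 127.0.0.1:9093 backed by the keystore/truststore that RACK_COMMAND downloads into /tmp. A minimal sketch of consuming from the SSL listener with the stock Kafka CLI tools (assumes the Kafka CLI is on PATH inside that container; the client-ssl.properties path is illustrative, the truststore path and password come from the spec above):

  # write a client config pointing at the truststore declared in the pod spec
  cat > /tmp/client-ssl.properties <<'EOF'
  security.protocol=SSL
  ssl.truststore.location=/tmp/kafka.server.truststore.jks
  ssl.truststore.password=test1234
  EOF
  # read the pre-created topic over the SSL listener
  kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9093 \
    --topic big-message-test --from-beginning \
    --consumer.config /tmp/client-ssl.properties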
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-lf7fd-1h1pt
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "d2b297814165b44e46dec26b90cb74ced88d4755"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-lf7fd"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-34lmg-6f874
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "1a38a519bab22d7fc28e5d9d7871332bf9464c38"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-34lmg"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3nsj2-hrfpd
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "87441ebb6b8bbf0645d55d13958ea311e7ce6bca"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3nsj2"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 27.70 secs (134422760 bytes/sec)
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] {
[Pipeline] node
[Pipeline] node
[Pipeline] node
[Pipeline] node
[Pipeline] node
[Pipeline] node
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] sh
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] checkout
[Pipeline] {
[Pipeline] {
The recommended git tool is: git
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] checkout
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
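The xtrace lines above are the pod's readiness probe: nc -z checks the ZooKeeper (2181) and Kafka (9092) ports, and the ZooKeeper "dump" four-letter command is grepped for /brokers/ids/1 to confirm that broker 1 has registered. A compact sketch of the same wait as a retry loop (the one-second retry interval is illustrative):

  # block until the broker has registered itself in ZooKeeper
  until nc -z localhost 2181 && nc -z localhost 9092 \
      && echo dump | nc localhost 2181 | grep -F -w -q /brokers/ids/1; do
    sleep 1
  done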
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
[Pipeline] checkout
The recommended git tool is: git
The recommended git tool is: git
The recommended git tool is: git
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg
[Pipeline] timeout
Timeout set to expire in 45 min
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj
[Pipeline] {
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@15305fe4; decorates RemoteLauncher[hudson.remoting.Channel@77afdbfc:JNLP4-connect connection from 10.233.97.97/10.233.97.97:48328] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@17e2bf49; decorates RemoteLauncher[hudson.remoting.Channel@204e4039:JNLP4-connect connection from 10.233.106.187/10.233.106.187:44160] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7e829d95; decorates RemoteLauncher[hudson.remoting.Channel@19995ab5:JNLP4-connect connection from 10.233.86.217/10.233.86.217:55920] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@32dc689c; decorates RemoteLauncher[hudson.remoting.Channel@28e80456:JNLP4-connect connection from 10.233.107.9/10.233.107.9:46522] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@7509d83c; decorates RemoteLauncher[hudson.remoting.Channel@1f62c29:JNLP4-connect connection from 10.233.100.141/10.233.100.141:37828] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@336abe09; decorates RemoteLauncher[hudson.remoting.Channel@50959873:JNLP4-connect connection from 10.233.88.30/10.233.88.30:45550] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
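Each executor above performs the same shallow checkout of PingCAP-QE/ci via Jenkins' init-then-fetch pattern. The equivalent sequence from a plain shell looks roughly like this (a sketch; the target directory name is illustrative, the commit SHA is the origin/main tip checked out later in this run):

  git init ci-checkout && cd ci-checkout
  git fetch --tags --force --depth=1 https://github.com/PingCAP-QE/ci.git '+refs/heads/*:refs/remotes/origin/*'
  git checkout -f 03312178c534dce949face80c69812d989e55009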
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp
Created Pod: kubernetes jenkins-tiflow/pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] }
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] // timeout
[Pipeline] }
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] // container
[Pipeline] sh
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G00
Run cases: bdr_mode capture_suicide_while_balance_table syncpoint hang_sink_suicide server_config_compatibility changefeed_dup_error_restart kafka_big_messages kafka_compression kafka_messages kafka_sink_error_resume mq_sink_lost_callback mq_sink_dispatcher kafka_column_selector kafka_column_selector_avro debezium lossy_ddl storage_csv_update
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=db8178a4-241e-4057-994a-6a93ff0e0f14
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G00
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pq5wq
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pq5wq pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/bdr_mode/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:20:11 CST 2024] <<<<<< run test case bdr_mode success! >>>>>>
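The group runner invoked above drives every case listed under "Run cases" against the kafka sink. A minimal sketch for re-running the same group from a tiflow checkout (assumes the integration-test binaries are already built and a local Kafka/ZooKeeper are reachable, as in this pod):

  cd tiflow
  chmod +x ./tests/integration_tests/run_group.sh
  ./tests/integration_tests/run_group.sh kafka G00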
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-ckb5f-6bmd4
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "2233cdd8260a1ab745a47741bb545d20b1887cd0"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-ckb5f"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wv6pc-gkl9g
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "7522f02e17585815dbdbb578b3b14172695baef0"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wv6pc"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
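The Kafka broker in these pod templates is configured entirely through environment variables: KAFKA_LISTENERS / KAFKA_ADVERTISED_LISTENERS expose a PLAINTEXT listener on 9092 and an SSL listener on 9093, KAFKA_MESSAGE_MAX_BYTES and KAFKA_REPLICA_FETCH_MAX_BYTES raise the broker message limit to 11 MiB for the big-message test, and RACK_COMMAND is repurposed to curl the JKS keystore and truststore into /tmp before the broker starts. For local debugging the same broker can be approximated with Docker; the following is a minimal sketch, assuming Docker with host networking is available (container names are illustrative, the SSL listener is omitted because it additionally needs the two JKS files fetched by RACK_COMMAND placed in /tmp):

# mirror the zookeeper + kafka containers from the pod template, PLAINTEXT only
docker run -d --name zk --network host wurstmeister/zookeeper
docker run -d --name kafka --network host \
  -e KAFKA_BROKER_ID=1 \
  -e KAFKA_ZOOKEEPER_CONNECT=localhost:2181 \
  -e KAFKA_LISTENERS=PLAINTEXT://127.0.0.1:9092 \
  -e KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://127.0.0.1:9092 \
  -e KAFKA_MESSAGE_MAX_BYTES=11534336 \
  -e KAFKA_REPLICA_FETCH_MAX_BYTES=11534336 \
  -e KAFKA_CREATE_TOPICS=big-message-test:1:1 \
  wurstmeister/kafka:2.12-2.4.1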
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-jb9b1-h2tv3
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "fec966d6e346cf1877dca0517f3c2d641436fe7f"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-jb9b1"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_suicide_while_balance_table/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:20:14 CST 2024] <<<<<< run test case capture_suicide_while_balance_table success! >>>>>>
Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-klk36-kzdf5
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "dec0a21c07560118a6f30eaab5ede894467f25b5"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-klk36"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24 is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-nbv84-cmn1w
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "59a6ab2baca08c267315ed742dfe541dbb439b3a"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-nbv84"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24 in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
Agent pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg is provisioned from template pingcap_tiflow_pull_cdc_integration_kafka_test_1855-rdz18-dxgf1
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
    runUrl: "job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "8d0aec765f76d3fc2580e05d1edaab58363b8d3e"
    jenkins/label: "pingcap_tiflow_pull_cdc_integration_kafka_test_1855-rdz18"
  name: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "2000m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "12"
        memory: "32Gi"
      requests:
        cpu: "12"
        memory: "32Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "4000m"
        memory: "6Gi"
      requests:
        cpu: "4000m"
        memory: "6Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "MYSQL_ROOT_PASSWORD"
      value: ""
    - name: "MYSQL_USER"
      value: "mysqluser"
    - name: "MYSQL_PASSWORD"
      value: "mysqlpw"
    - name: "MYSQL_ALLOW_EMPTY_PASSWORD"
      value: "yes"
    - name: "MYSQL_TCP_PORT"
      value: "3310"
    image: "quay.io/debezium/example-mysql:2.4"
    imagePullPolicy: "IfNotPresent"
    name: "mysql"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "BOOTSTRAP_SERVERS"
      value: "127.0.0.1:9092"
    - name: "GROUP_ID"
      value: "1"
    - name: "CONFIG_STORAGE_TOPIC"
      value: "my_connect_configs"
    - name: "OFFSET_STORAGE_TOPIC"
      value: "my_connect_offsets"
    - name: "STATUS_STORAGE_TOPIC"
      value: "my_connect_statuses"
    image: "quay.io/debezium/connect:2.4"
    name: "connect"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg"
    - name: "JENKINS_NAME"
      value: "pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/syncpoint/run.sh using Sink-Type: kafka... <<=================
kafka downstream doesn't support syncpoint records
[Sun May  5 11:20:18 CST 2024] <<<<<< run test case syncpoint success! >>>>>>
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/hang_sink_suicide/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:20:21 CST 2024] <<<<<< run test case hang_sink_suicide success! >>>>>>
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/server_config_compatibility/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:20:24 CST 2024] <<<<<< run test case server_config_compatibility success! >>>>>>
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 13.65 secs (272767207 bytes/sec)
[Pipeline] {
[Pipeline] cache
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_dup_error_restart/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:20:28 CST 2024] <<<<<< run test case changefeed_dup_error_restart success! >>>>>>
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 15.52 secs (239964963 bytes/sec)
[Pipeline] {
[Pipeline] cache
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b12b9ac0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:1914, start at 2024-05-05 11:20:45.952460741 +0800 CST m=+5.230356166	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:22:45.960 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:20:45.931 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:10:45.931 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b12b9ac0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:1914, start at 2024-05-05 11:20:45.952460741 +0800 CST m=+5.230356166	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:22:45.960 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:20:45.931 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:10:45.931 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b12ba780015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:1994, start at 2024-05-05 11:20:46.026972055 +0800 CST m=+5.251528258	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:22:46.036 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:20:46.032 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:10:46.032 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
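The bootstrapped and tikv_gc_* rows above are the contents of the mysql.tidb system table (VARIABLE_NAME, VARIABLE_VALUE, plus a COMMENT column), which the test harness polls to decide that each TiDB instance has finished bootstrapping. They can be checked manually with any MySQL client; a sketch, assuming the upstream TiDB listens on the default port 4000 and accepts the passwordless root user (the log does not show the port, so treat both as assumptions):

# dump the bootstrap/GC bookkeeping rows shown above
mysql -h 127.0.0.1 -P 4000 -u root \
  -e "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb"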
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[Sun May  5 11:20:51 CST 2024] <<<<<< START cdc server in kafka_big_messages case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages.33643366.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:20:54 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7c023088-499b-4d06-b8d6-f7beb7685d2e
	{"id":"7c023088-499b-4d06-b8d6-f7beb7685d2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879251}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c48244cc
	7c023088-499b-4d06-b8d6-f7beb7685d2e

/tidb/cdc/default/default/upstream/7365350243656013355
	{"id":7365350243656013355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7c023088-499b-4d06-b8d6-f7beb7685d2e
	{"id":"7c023088-499b-4d06-b8d6-f7beb7685d2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879251}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c48244cc
	7c023088-499b-4d06-b8d6-f7beb7685d2e

/tidb/cdc/default/default/upstream/7365350243656013355
	{"id":7365350243656013355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7c023088-499b-4d06-b8d6-f7beb7685d2e
	{"id":"7c023088-499b-4d06-b8d6-f7beb7685d2e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879251}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c48244cc
	7c023088-499b-4d06-b8d6-f7beb7685d2e

/tidb/cdc/default/default/upstream/7365350243656013355
	{"id":7365350243656013355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
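The trace above is the usual readiness loop: the script polls the TiCDC debug endpoint with HTTP basic auth (ticdc:ticdc_secret, which is exactly what the Basic header dGljZGM6dGljZGNfc2VjcmV0 decodes to) until the response contains 'etcd info', i.e. until the capture has registered itself in etcd. Condensed into a standalone sketch (endpoint, credentials and retry limits taken from the trace; the function name is illustrative):

wait_cdc_ready() {
  # poll /debug/info until the capture shows up in the etcd dump, at most 50 tries
  for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
    echo "$res" | grep -q 'failed to get info:' && { sleep 3; continue; }
    echo "$res" | grep -q 'etcd info' && return 0
    sleep 3
  done
  return 1
}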
Changefeed created successfully!
ID: 227f2f90-e984-454f-8775-e0f60ede7a5a
Info: {"upstream_id":7365350243656013355,"namespace":"default","id":"227f2f90-e984-454f-8775-e0f60ede7a5a","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-05T11:20:54.445960278+08:00","start_ts":449545306408484865,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545306408484865,"checkpoint_ts":449545306408484865,"checkpoint_time":"2024-05-05 11:20:51.131"}
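All of the test-specific sink configuration is carried in the sink URI: topic big-message-test, open-protocol encoding, a single partition, Kafka version 2.4.1 and a 12 MiB max-message-bytes to match the enlarged broker limits. Recreating the same changefeed by hand would look roughly like this (values copied from the Info JSON above; the --server flag is what recent cdc cli versions accept, so treat the exact flag name as an assumption):

cdc cli changefeed create --server=http://127.0.0.1:8300 \
  --sink-uri="kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol&partition-num=1&kafka-version=2.4.1&max-message-bytes=12582912"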
[Sun May  5 11:20:54 CST 2024] <<<<<< START kafka consumer in kafka_big_messages case >>>>>>
Starting to generate kafka big messages...
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading go.uber.org/atomic v1.11.0
table kafka_big_messages.test exists
check diff failed 1st time, retry later
check diff failed 2nd time, retry later
check diff successfully
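check diff is a retry wrapper around the upstream/downstream table comparison: it is expected to fail a couple of times while the changefeed and the Kafka consumer catch up, and the case only fails if the tables never converge. A standalone sketch of that pattern, assuming sync_diff_inspector is on PATH and that a diff config exists at the path shown (both assumptions; the real helper lives in the tiflow test scripts):

# retry the comparison; the downstream needs a moment to catch up with the changefeed
for i in $(seq 1 10); do
  if sync_diff_inspector --config=/tmp/tidb_cdc_test/kafka_big_messages/diff_config.toml; then
    echo "check diff successfully"
    exit 0
  fi
  echo "check diff failed, retrying..."
  sleep 5
done
exit 1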
wait process cdc.test exit for 1st time...
wait process cdc.test exit for 2nd time...
cdc.test: no process found
wait process cdc.test exit for 3rd time...
process cdc.test already exit
[Sun May  5 11:21:07 CST 2024] <<<<<< run test case kafka_big_messages success! >>>>>>
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 20.18 secs (184493403 bytes/sec)
[Pipeline] {
[Pipeline] cache
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/kafka_compression
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 14.61 secs (254917175 bytes/sec)
[Pipeline] {
[Pipeline] cache
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b158bfc0016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:5765, start at 2024-05-05 11:21:32.205414178 +0800 CST m=+5.206391498	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:23:32.212 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:21:32.209 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:11:32.209 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b158bfc0016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:5765, start at 2024-05-05 11:21:32.205414178 +0800 CST m=+5.206391498	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:23:32.212 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:21:32.209 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:11:32.209 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b158d780015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:5851, start at 2024-05-05 11:21:32.294179482 +0800 CST m=+5.234054173	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:23:32.301 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:21:32.304 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:11:32.304 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_compression/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_compression/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[Sun May  5 11:21:37 CST 2024] <<<<<< START cdc server in kafka_compression case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.72557257.out server --log-file /tmp/tidb_cdc_test/kafka_compression/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_compression/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:21:40 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/812563a3-cfdd-445f-b6fc-e7bbb340a07d
	{"id":"812563a3-cfdd-445f-b6fc-e7bbb340a07d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879297}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c53aefca
	812563a3-cfdd-445f-b6fc-e7bbb340a07d

/tidb/cdc/default/default/upstream/7365350438194238204
	{"id":7365350438194238204,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/812563a3-cfdd-445f-b6fc-e7bbb340a07d
	{"id":"812563a3-cfdd-445f-b6fc-e7bbb340a07d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879297}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c53aefca
	812563a3-cfdd-445f-b6fc-e7bbb340a07d

/tidb/cdc/default/default/upstream/7365350438194238204
	{"id":7365350438194238204,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/812563a3-cfdd-445f-b6fc-e7bbb340a07d
	{"id":"812563a3-cfdd-445f-b6fc-e7bbb340a07d","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879297}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c53aefca
	812563a3-cfdd-445f-b6fc-e7bbb340a07d

/tidb/cdc/default/default/upstream/7365350438194238204
	{"id":7365350438194238204,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
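
The loop traced above polls the CDC server's /debug/info endpoint until the response contains "etcd info", giving up after 50 attempts with a 3-second sleep between tries. A minimal standalone sketch of that polling pattern follows; the endpoint, credentials, retry limit and marker strings are taken from the trace, while the function name and error handling are illustrative rather than the harness's own helper.

    # Poll the CDC debug endpoint until the capture shows up in etcd (sketch).
    wait_for_cdc_ready() {
        local url="http://127.0.0.1:8300/debug/info"
        local i res
        for i in $(seq 0 50); do
            # curl's -v diagnostics go to the console; only the response body is captured.
            res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret)
            if echo "$res" | grep -q 'failed to get info:'; then
                echo "cdc server returned an error" >&2
                return 1
            fi
            if echo "$res" | grep -q 'etcd info'; then
                return 0   # capture registered in etcd, server is ready
            fi
            sleep 3
        done
        return 1
    }
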
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7311.out cli tso query --pd=http://127.0.0.1:2379
+ set +x
+ tso='449545319405060101
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545319405060101 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7351.out cli changefeed create --start-ts=449545319405060101 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip' -c gzip
Create changefeed successfully!
ID: gzip
Info: {"upstream_id":7365350438194238204,"namespace":"default","id":"gzip","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=gzip","create_time":"2024-05-05T11:21:42.668521504+08:00","start_ts":449545319405060101,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545319405060101,"checkpoint_ts":449545319405060101,"checkpoint_time":"2024-05-05 11:21:40.709"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
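
Each compression case in this log is driven by the same pair of cli calls: query a start TSO from PD, then create a changefeed whose Kafka sink URI carries the compression query parameter, using the algorithm name as the changefeed ID. A condensed sketch of those two steps, with the PD address, URI shape and flags copied from the trace (the harness's own wrapper functions and coverage flags are omitted):

    # The TSO is the first field of the first output line of "tso query".
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk '{print $1}')

    algo=gzip   # snappy follows later in this log; other values would take the same shape
    cdc cli changefeed create \
        --start-ts="$start_ts" \
        --sink-uri="kafka://127.0.0.1:9092/ticdc-kafka-compression-${algo}-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=${algo}" \
        -c "$algo"
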
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 13.26 secs (280813175 bytes/sec)
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
+ set +x
[Sun May  5 11:21:44 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>>
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/05 11:21:42.630 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.663 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.790 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.799 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:43.764 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:43.773 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]: syntax error: operand expected (error token is "[2024/05/05 11:21:42.630 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.663 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.790 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:42.799 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:43.764 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]
[2024/05/05 11:21:43.773 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses gzip compression algorithm"]")
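
The "syntax error: operand expected" above (and the matching one in the snappy case later) is bash arithmetic evaluation inside [[ ... ]] choking on an operand that is not a number: the operand holds the multi-line sarama log text, and bash reports its first token ("[2024/05/05 ...") as the error token. Line 22 of kafka_compression/run.sh is not reproduced in this log, so the snippet below is only a hypothetical reconstruction of the error class, together with a count-based variant that keeps the operand numeric; the log path is the one passed to the cdc server earlier in the trace.

    # Hypothetical: capturing matched lines instead of a match count.
    matched=$(grep "compression algorithm" /tmp/tidb_cdc_test/kafka_compression/cdc.log)
    [[ $matched -ge 1 ]] && echo "compression in use"   # -ge forces arithmetic: multi-line text gives "operand expected"

    # Keeping the operand numeric avoids the error:
    count=$(grep -c "compression algorithm" /tmp/tidb_cdc_test/kafka_compression/cdc.log)
    [[ $count -ge 1 ]] && echo "compression in use"
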
table test.gzip_finish_mark not exists for 1-th check, retry later
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] sh
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
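
The repeated "Waiting for ..." blocks here are the same broker readiness probe, run once in each parallel test container: nc checks the ZooKeeper and Kafka ports, then ZooKeeper's "dump" command is grepped for the registered broker ID. A standalone sketch of that probe follows; the ports and broker ID 1 come from the trace, while the retry loops are added for illustration (in this run everything was already up, so the trace shows a single successful pass).

    #!/usr/bin/env bash
    # Wait for ZooKeeper, the Kafka port, and broker registration (sketch).
    echo "Waiting for zookeeper to be ready..."
    until nc -z localhost 2181; do sleep 1; done

    echo "Waiting for kafka to be ready..."
    until nc -z localhost 9092; do sleep 1; done

    echo "Waiting for kafka-broker to be ready..."
    until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
        sleep 1
    done
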
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
table test.gzip_finish_mark not exists for 2-th check, retry later
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] }
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] checkout
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
The recommended git tool is: git
[Pipeline] checkout
The recommended git tool is: git
The recommended git tool is: git
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
table test.gzip_finish_mark exists
check diff successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7475.out cli changefeed pause -c gzip
[Pipeline] // container
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@509abde0; decorates RemoteLauncher[hudson.remoting.Channel@12b6d106:JNLP4-connect connection from 10.233.90.216/10.233.90.216:51292] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@2f9ba219; decorates RemoteLauncher[hudson.remoting.Channel@345ac6bf:JNLP4-connect connection from 10.233.93.194/10.233.93.194:38784] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
[Pipeline] // container
[Pipeline] sh
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5af4cf47; decorates RemoteLauncher[hudson.remoting.Channel@3f3a1e60:JNLP4-connect connection from 10.233.67.140/10.233.67.140:47728] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@4791a92f; decorates RemoteLauncher[hudson.remoting.Channel@6cc60794:JNLP4-connect connection from 10.233.66.45/10.233.66.45:46146] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@5b385f64; decorates RemoteLauncher[hudson.remoting.Channel@76e68e33:JNLP4-connect connection from 10.233.69.108/10.233.69.108:54300] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
No credentials specified
Warning: JENKINS-30600: special launcher org.csanchez.jenkins.plugins.kubernetes.pipeline.ContainerExecDecorator$1@f561ffc; decorates RemoteLauncher[hudson.remoting.Channel@78d7af9d:JNLP4-connect connection from 10.233.68.40/10.233.68.40:43498] will be ignored (a typical symptom is the Git executable not being run inside a designated container)
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
[Pipeline] sh
[Pipeline] sh
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G05
Run cases: charset_gbk ddl_manager multi_source
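
run_group.sh takes a sink type and a group name, expands the group into a fixed case list (the "Run cases: ..." line above), and then executes each case's run.sh against that sink type, which is why every case banner below reads "using Sink-Type: kafka". The dispatch can be pictured roughly as follows; the group table is trimmed to three of the groups visible in this log, and how the sink type reaches each run.sh (here: as its first argument) is an assumption, not the script's actual contents.

    #!/usr/bin/env bash
    # Rough sketch of the group-to-cases dispatch (illustrative only).
    set -euo pipefail

    sink_type=$1   # e.g. kafka
    group=$2       # e.g. G05

    declare -A groups=(
        [G03]="row_format drop_many_tables processor_stop_delay partition_table"
        [G04]="foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop"
        [G05]="charset_gbk ddl_manager multi_source"
    )

    echo "Run cases: ${groups[$group]}"
    for tc in ${groups[$group]}; do
        bash "tests/integration_tests/${tc}/run.sh" "$sink_type"
    done
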
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=aaf44a3b-ea41-4ded-ac2e-f5c239e8fd8a
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G05
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pfbpq
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pfbpq pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/charset_gbk/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
[Pipeline] sh
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G04
Run cases: foreign_key ddl_puller_lag ddl_only_block_related_table changefeed_auto_stop
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=d1cb71ef-877c-4561-8fea-7e6112bc939b
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G04
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-6skrl
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-6skrl pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/foreign_key/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G01
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
[Pipeline] {
[Pipeline] {
[Pipeline] // container
[Pipeline] sh
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G02
Run cases: consistent_replicate_ddl consistent_replicate_gbk consistent_replicate_nfs consistent_replicate_storage_file consistent_replicate_storage_file_large_value consistent_replicate_storage_s3 consistent_partition_table kafka_big_messages_v2 multi_tables_ddl_v2 multi_topics_v2 storage_cleanup csv_storage_basic csv_storage_multi_tables_ddl csv_storage_partition_table
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=46e4f944-2610-4aea-a60c-aa3f1a5d7b2c
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G02
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-2hg1d
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-2hg1d pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:21:50 CST 2024] <<<<<< run test case consistent_replicate_ddl success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

Run cases: http_api http_api_tls api_v2 http_api_tls_with_user_auth cli_tls_with_auth kafka_simple_basic kafka_simple_basic_avro kafka_simple_handle_key_only kafka_simple_handle_key_only_avro kafka_simple_claim_check kafka_simple_claim_check_avro canal_json_adapter_compatibility canal_json_basic canal_json_content_compatible multi_topics avro_basic canal_json_handle_key_only open_protocol_handle_key_only canal_json_claim_check open_protocol_claim_check canal_json_storage_basic canal_json_storage_partition_table multi_tables_ddl
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=91ac5fb1-bf6b-40c7-88ba-579cb2eccf5e
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G01
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wqs8r
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wqs8r pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:21:50 CST 2024] <<<<<< run test case http_api success! >>>>>>
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7524.out cli changefeed remove -c gzip
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G03
Run cases: row_format drop_many_tables processor_stop_delay partition_table
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=801a21fd-6d3f-4e53-bd83-afe9e17ad857
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G03
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3cmhg
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3cmhg pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/row_format/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
Changefeed remove successfully.
ID: gzip
CheckpointTs: 449545320322564141
SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-gzip-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=gzip
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
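
Once a compression case passes its diff check, the harness tears the changefeed down through the cli before moving to the next algorithm, as the pause step, the remove step, and the confirmation just above show. Stripped of the coverage wrapper, the teardown amounts to:

    # Pause, then remove, the per-algorithm changefeed (ID from the trace; PD defaults to http://127.0.0.1:2379).
    cdc cli changefeed pause  -c gzip
    cdc cli changefeed remove -c gzip
    cdc cli changefeed list             # optional: confirm the feed is gone
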
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Avoid second fetch
Checking out Revision 03312178c534dce949face80c69812d989e55009 (origin/main)
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
+ set +x
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 03312178c534dce949face80c69812d989e55009 # timeout=10
Commit message: "fix(br): use failpoint tidb-server instead (#2951)"
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7557.out cli tso query --pd=http://127.0.0.1:2379
start tidb cluster in /tmp/tidb_cdc_test/charset_gbk
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:21:53 CST 2024] <<<<<< run test case http_api_tls success! >>>>>>
start tidb cluster in /tmp/tidb_cdc_test/row_format
Starting Upstream PD...
start tidb cluster in /tmp/tidb_cdc_test/foreign_key
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_gbk/run.sh using Sink-Type: kafka... <<=================
* About to connect() to 127.0.0.1 port 24927 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:24927; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
+ set +x
+ tso='449545322590109697
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545322590109697 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7591.out cli changefeed create --start-ts=449545322590109697 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy' -c snappy
Create changefeed successfully!
ID: snappy
Info: {"upstream_id":7365350438194238204,"namespace":"default","id":"snappy","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=snappy","create_time":"2024-05-05T11:21:54.7178372+08:00","start_ts":449545322590109697,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545322590109697,"checkpoint_ts":449545322590109697,"checkpoint_time":"2024-05-05 11:21:52.859"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
Endpoint:  http://127.0.0.1:24927

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 24927 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:24927
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CC79A6E47DFA18
< X-Xss-Protection: 1; mode=block
< Date: Sun, 05 May 2024 03:21:55 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
Bucket 's3://logbucket/' created
[Sun May  5 11:21:55 CST 2024] <<<<<< run test case consistent_replicate_gbk success! >>>>>>
Exiting on signal: INTERRUPT
Verifying downstream PD is started...
+ set +x
[Sun May  5 11:21:56 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>>
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/05 11:21:54.681 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.713 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.820 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.829 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:55.816 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:55.824 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]: syntax error: operand expected (error token is "[2024/05/05 11:21:54.681 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.713 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.820 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:54.829 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:55.816 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]
[2024/05/05 11:21:55.824 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses snappy compression algorithm"]")
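[editor note] The "syntax error: operand expected" above appears to come from run.sh line 22 feeding the multi-line sarama log matches into an arithmetic comparison inside `[[ ]]`, which bash cannot evaluate as a number. A minimal reproduction of this class of error (hypothetical variable names; the real run.sh is not shown in this log):

matches=$(grep 'compression algorithm' cdc.log)   # multi-line log text
[[ $matches -ge 1 ]] && echo ok                   # -> "syntax error: operand expected"

# Comparing a numeric count instead avoids the failure:
count=$(grep -c 'compression algorithm' cdc.log)
[[ $count -ge 1 ]] && echo ok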
table test.snappy_finish_mark not exists for 1-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.snappy_finish_mark not exists for 2-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/api_v2/run.sh using Sink-Type: kafka... <<=================
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_nfs/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:21:59 CST 2024] <<<<<< run test case consistent_replicate_nfs success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 7.42 secs (502103213 bytes/sec)
[Pipeline] {
[Pipeline] cache
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.snappy_finish_mark not exists for 3-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1757180018	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:1244, start at 2024-05-05 11:22:01.574413203 +0800 CST m=+5.043912029	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:01.581 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:01.542 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:01.542 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1757180018	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:1244, start at 2024-05-05 11:22:01.574413203 +0800 CST m=+5.043912029	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:01.581 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:01.542 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:01.542 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b175a38000d	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:1310, start at 2024-05-05 11:22:01.754519121 +0800 CST m=+5.176819013	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:01.761 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:01.742 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:01.742 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
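[editor note] The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are rows from TiDB's mysql.tidb bookkeeping table; they are printed under "Verifying Upstream/Downstream TiDB is started...", so reading the bootstrapped flag and the tikv_gc_* rows evidently doubles as the readiness probe. A hedged sketch of such a probe (host and port are placeholders; the test uses its own ports):

mysql -h 127.0.0.1 -P 4000 -u root \
  -e "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;"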
table test.snappy_finish_mark exists
check diff successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7721.out cli changefeed pause -c snappy
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/charset_gbk/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/charset_gbk/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7754.out cli changefeed remove -c snappy
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:22:04 CST 2024] <<<<<< run test case consistent_replicate_storage_file success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Changefeed remove successfully.
ID: snappy
CheckpointTs: 449545325002096644
SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-snappy-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=snappy
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
[Sun May  5 11:22:04 CST 2024] <<<<<< START cdc server in charset_gbk case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.charset_gbk.27782780.out server --log-file /tmp/tidb_cdc_test/charset_gbk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/charset_gbk/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
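[editor note] The block above is the server-readiness loop repeated throughout these cases: up to 50 attempts, each curling the CDC /debug/info endpoint with basic auth, grepping the body for the failure marker "failed to get info:" and breaking once "etcd info" appears. A condensed sketch of that loop (not the verbatim test helper; treating the failure marker as fatal is an assumption here):

for ((i = 0; i <= 50; i++)); do
  res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
  echo "$res" | grep -q 'failed to get info:' && exit 1
  echo "$res" | grep -q 'etcd info' && break
  [ "$i" -eq 50 ] && exit 1
  sleep 3
done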
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/http_api_tls_with_user_auth/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:22:05 CST 2024] <<<<<< run test case http_api_tls_with_user_auth success! >>>>>>
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b178d700013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:1399, start at 2024-05-05 11:22:05.044777933 +0800 CST m=+5.351718591	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:05.053 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:05.020 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:05.020 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7789.out cli tso query --pd=http://127.0.0.1:2379
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_file_large_value/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:22:07 CST 2024] <<<<<< run test case consistent_replicate_storage_file_large_value success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b178d700013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:1399, start at 2024-05-05 11:22:05.044777933 +0800 CST m=+5.351718591	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:05.053 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:05.020 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:05.020 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b178d500014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:1482, start at 2024-05-05 11:22:05.033496067 +0800 CST m=+5.287460689	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:05.041 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:05.012 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:05.012 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/foreign_key/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/foreign_key/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/foreign_key/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:22:08 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3c907d3a-a698-4204-9fe1-658ef474ea27
	{"id":"3c907d3a-a698-4204-9fe1-658ef474ea27","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879325}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b42bc3
	3c907d3a-a698-4204-9fe1-658ef474ea27

/tidb/cdc/default/default/upstream/7365350568182897187
	{"id":7365350568182897187,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3c907d3a-a698-4204-9fe1-658ef474ea27
	{"id":"3c907d3a-a698-4204-9fe1-658ef474ea27","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879325}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b42bc3
	3c907d3a-a698-4204-9fe1-658ef474ea27

/tidb/cdc/default/default/upstream/7365350568182897187
	{"id":7365350568182897187,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3c907d3a-a698-4204-9fe1-658ef474ea27
	{"id":"3c907d3a-a698-4204-9fe1-658ef474ea27","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879325}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b42bc3
	3c907d3a-a698-4204-9fe1-658ef474ea27

/tidb/cdc/default/default/upstream/7365350568182897187
	{"id":7365350568182897187,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: c0210e2e-a33e-4a16-803e-d66def19d599
Info: {"upstream_id":7365350568182897187,"namespace":"default","id":"c0210e2e-a33e-4a16-803e-d66def19d599","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-05-05T11:22:08.218407132+08:00","start_ts":449545325744750594,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545325744750594,"checkpoint_ts":449545325744750594,"checkpoint_time":"2024-05-05 11:22:04.893"}
[Sun May  5 11:22:08 CST 2024] <<<<<< START kafka consumer in charset_gbk case >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ tso='449545326260125701
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545326260125701 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7826.out cli changefeed create --start-ts=449545326260125701 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4' -c lz4
Create changefeed successfully!
ID: lz4
Info: {"upstream_id":7365350438194238204,"namespace":"default","id":"lz4","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=lz4","create_time":"2024-05-05T11:22:08.817795576+08:00","start_ts":449545326260125701,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545326260125701,"checkpoint_ts":449545326260125701,"checkpoint_time":"2024-05-05 11:22:06.859"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_tls_with_auth/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2808.out cli tso query --pd=http://127.0.0.1:2379
+ set +x
[Sun May  5 11:22:10 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/05 11:22:08.782 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.813 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.916 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.924 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:09.917 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:09.924 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]: syntax error: operand expected (error token is "[2024/05/05 11:22:08.782 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.813 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.916 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:08.924 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:09.917 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]
[2024/05/05 11:22:09.924 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses lz4 compression algorithm"]")
table test.lz4_finish_mark not exists for 1-th check, retry later
+ set +x
+ tso='449545327167668225
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545327167668225 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:22:11 CST 2024] <<<<<< START cdc server in foreign_key case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.28462848.out server --log-file /tmp/tidb_cdc_test/foreign_key/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/foreign_key/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
start tidb cluster in /tmp/tidb_cdc_test/cli_tls_with_auth
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b17d6f00019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:1408, start at 2024-05-05 11:22:09.759026149 +0800 CST m=+9.392768103	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:09.766 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:09.724 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:09.724 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b17d6f00019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:1408, start at 2024-05-05 11:22:09.759026149 +0800 CST m=+9.392768103	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:09.766 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:09.724 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:09.724 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b17d7a80015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:1491, start at 2024-05-05 11:22:09.807932563 +0800 CST m=+9.388413034	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:09.814 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:09.770 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:09.770 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/row_format/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/row_format/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/row_format/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/row_format/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.lz4_finish_mark not exists for 2-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2814.out cli tso query --pd=http://127.0.0.1:2379
table test.lz4_finish_mark not exists for 3-th check, retry later
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 12.36 secs (301368374 bytes/sec)
[Pipeline] {
[Pipeline] cache
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:22:14 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4fe48083-a2a0-485b-8d48-d0f0e2877ad0
	{"id":"4fe48083-a2a0-485b-8d48-d0f0e2877ad0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879332}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b69dd7
	4fe48083-a2a0-485b-8d48-d0f0e2877ad0

/tidb/cdc/default/default/upstream/7365350576712075909
	{"id":7365350576712075909,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4fe48083-a2a0-485b-8d48-d0f0e2877ad0
	{"id":"4fe48083-a2a0-485b-8d48-d0f0e2877ad0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879332}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b69dd7
	4fe48083-a2a0-485b-8d48-d0f0e2877ad0

/tidb/cdc/default/default/upstream/7365350576712075909
	{"id":7365350576712075909,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4fe48083-a2a0-485b-8d48-d0f0e2877ad0
	{"id":"4fe48083-a2a0-485b-8d48-d0f0e2877ad0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879332}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b69dd7
	4fe48083-a2a0-485b-8d48-d0f0e2877ad0

/tidb/cdc/default/default/upstream/7365350576712075909
	{"id":7365350576712075909,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.foreign_key.cli.2910.out cli changefeed create --start-ts=449545327167668225 '--sink-uri=kafka://127.0.0.1:9092/ticdc-foreign-key-test-24171?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: b2e568a5-2d9d-46ea-acec-ad5368fb89cf
Info: {"upstream_id":7365350576712075909,"namespace":"default","id":"b2e568a5-2d9d-46ea-acec-ad5368fb89cf","sink_uri":"kafka://127.0.0.1:9092/ticdc-foreign-key-test-24171?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:22:15.478160767+08:00","start_ts":449545327167668225,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545327167668225,"checkpoint_ts":449545327167668225,"checkpoint_time":"2024-05-05 11:22:10.321"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_replicate_storage_s3/run.sh using Sink-Type: kafka... <<=================
* About to connect() to 127.0.0.1 port 24927 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:24927; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:24927

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
+ set +x
+ tso='449545328400793601
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545328400793601 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:22:16 CST 2024] <<<<<< START cdc server in row_format case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.28562858.out server --log-file /tmp/tidb_cdc_test/row_format/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/row_format/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
* About to connect() to 127.0.0.1 port 24927 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 24927 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:24927
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CC79ABC888D99E
< X-Xss-Protection: 1; mode=block
< Date: Sun, 05 May 2024 03:22:16 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
table test.lz4_finish_mark exists
check diff successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.7974.out cli changefeed pause -c lz4
+ set +x
[Sun May  5 11:22:16 CST 2024] <<<<<< START kafka consumer in foreign_key case >>>>>>
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Bucket 's3://logbucket/' created
[Sun May  5 11:22:16 CST 2024] <<<<<< run test case consistent_replicate_storage_s3 success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

Exiting on signal: INTERRUPT
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8002.out cli changefeed remove -c lz4
Changefeed remove successfully.
ID: lz4
CheckpointTs: 449545328671850503
SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-lz4-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=lz4
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
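[editor note] Each compression changefeed is torn down the same way once its finish mark is replicated and the diff check passes: pause, then remove, both addressed by the changefeed ID. A sketch of the teardown pair, with flags taken from the traced commands (the coverprofile arguments are omitted):

cdc cli changefeed pause  -c "${algo}"    # e.g. -c lz4, as above
cdc cli changefeed remove -c "${algo}"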
table foreign_key.finish_mark not exists for 1-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:22:19 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/deaa864d-9c3c-42e7-911e-d5e3b033c017
	{"id":"deaa864d-9c3c-42e7-911e-d5e3b033c017","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879336}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b95cda
	deaa864d-9c3c-42e7-911e-d5e3b033c017

/tidb/cdc/default/default/upstream/7365350585557502874
	{"id":7365350585557502874,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/deaa864d-9c3c-42e7-911e-d5e3b033c017
	{"id":"deaa864d-9c3c-42e7-911e-d5e3b033c017","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879336}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b95cda
	deaa864d-9c3c-42e7-911e-d5e3b033c017

/tidb/cdc/default/default/upstream/7365350585557502874
	{"id":7365350585557502874,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/deaa864d-9c3c-42e7-911e-d5e3b033c017
	{"id":"deaa864d-9c3c-42e7-911e-d5e3b033c017","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879336}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c5b95cda
	deaa864d-9c3c-42e7-911e-d5e3b033c017

/tidb/cdc/default/default/upstream/7365350585557502874
	{"id":7365350585557502874,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.row_format.cli.2903.out cli changefeed create --start-ts=449545328400793601 '--sink-uri=kafka://127.0.0.1:9092/ticdc-row-format-test-4288?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: 840a5946-48b9-4723-acc7-b59f284af2b7
Info: {"upstream_id":7365350585557502874,"namespace":"default","id":"840a5946-48b9-4723-acc7-b59f284af2b7","sink_uri":"kafka://127.0.0.1:9092/ticdc-row-format-test-4288?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:22:20.102462502+08:00","start_ts":449545328400793601,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545328400793601,"checkpoint_ts":449545328400793601,"checkpoint_time":"2024-05-05 11:22:15.025"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
+ set +x
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8038.out cli tso query --pd=http://127.0.0.1:2379
table foreign_key.finish_mark not exists for 2-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
[Sun May  5 11:22:21 CST 2024] <<<<<< START kafka consumer in row_format case >>>>>>
+ set +x
+ tso='449545329956356099
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545329956356099 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
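
The trace just above shows how the harness turns the output of `cdc cli tso query` into a usable start-ts: the instrumented cdc.test binary prints the TSO on the first line followed by its own PASS/coverage lines, and the script keeps only the first whitespace-separated field with awk. A sketch of that extraction, with an illustrative coverprofile path:

# Sketch of the start-ts extraction traced above. The unquoted expansion of
# $tso deliberately collapses the multi-line output onto one line, so awk's
# first field is the TSO itself and the PASS/coverage noise is dropped.
tso=$(cdc.test -test.coverprofile=/tmp/cov.out cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso | awk -F ' ' '{print $1}')
echo "using --start-ts=$start_ts"
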
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8072.out cli changefeed create --start-ts=449545329956356099 '--sink-uri=kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd' -c zstd
Create changefeed successfully!
ID: zstd
Info: {"upstream_id":7365350438194238204,"namespace":"default","id":"zstd","sink_uri":"kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1\u0026compression=zstd","create_time":"2024-05-05T11:22:22.870718859+08:00","start_ts":449545329956356099,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545329956356099,"checkpoint_ts":449545329956356099,"checkpoint_time":"2024-05-05 11:22:20.959"}
PASS
table charset_gbk_test0.t0 exists
table charset_gbk_test0.t1 exists
table charset_gbk_test1.t0 exists
table test.finish_mark not exists for 1-th check, retry later
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table foreign_key.finish_mark not exists for 3-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/consistent_partition_table/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:22:23 CST 2024] <<<<<< run test case consistent_partition_table success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

table test.finish_mark not exists for 2-th check, retry later
+ set +x
[Sun May  5 11:22:24 CST 2024] <<<<<< START kafka consumer in kafka_compression case >>>>>>
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_compression/run.sh: line 22: [[: [2024/05/05 11:22:22.833 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.867 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.966 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.974 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:23.967 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:23.974 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]: syntax error: operand expected (error token is "[2024/05/05 11:22:22.833 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.867 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.966 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:22.974 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:23.967 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]
[2024/05/05 11:22:23.974 +08:00] [INFO] [sarama.go:96] ["Kafka producer uses zstd compression algorithm"]")
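
The "operand expected" error above comes from line 22 of kafka_compression/run.sh feeding a multi-line grep result straight into an arithmetic [[ ... ]] comparison, so bash sees the matched log lines where it expects a number. A hypothetical illustration of a safer variant using `grep -c`, which yields a single integer; the log path below is assumed purely for illustration and is not the actual script:

# Hypothetical sketch only: count matches instead of comparing raw grep output.
log=/tmp/tidb_cdc_test/kafka_compression/cdc.log   # assumed path, for illustration
algorithm=zstd
count=$(grep -c "Kafka producer uses ${algorithm} compression algorithm" "$log" || true)
if [[ "$count" -ge 1 ]]; then
    echo "compression ${algorithm} confirmed (${count} log hits)"
fi
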
table test.zstd_finish_mark not exists for 1-th check, retry later
table foreign_key.finish_mark not exists for 4-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 3-th check, retry later
table foreign_key.finish_mark not exists for 5-th check, retry later
table test.zstd_finish_mark not exists for 2-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b18c3e0000e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:1708, start at 2024-05-05 11:22:24.901303151 +0800 CST m=+5.826068658	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:24.907 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:24.888 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:24.888 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b18c3e0000e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:1708, start at 2024-05-05 11:22:24.901303151 +0800 CST m=+5.826068658	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:24.907 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:24.888 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:24.888 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b18c3040006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:1792, start at 2024-05-05 11:22:24.836027378 +0800 CST m=+5.706151197	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:24.843 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:24.833 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:24.833 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cli_tls_with_auth/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish_mark exists
check table exists success
check diff successfully
wait process cdc.test exit for 1-th time...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_big_messages_v2/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
wait process cdc.test exit for 2-th time...
table test.zstd_finish_mark exists
check diff successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8195.out cli changefeed pause -c zstd
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:22:29 CST 2024] <<<<<< run test case charset_gbk success! >>>>>>
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table foreign_key.finish_mark not exists for 6-th check, retry later
The 1 times to try to start tls tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/cli_tls_with_auth
Starting TLS PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_compression.cli.8229.out cli changefeed remove -c zstd
Starting TLS TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table foreign_key.finish_mark not exists for 7-th check, retry later
Changefeed remove successfully.
ID: zstd
CheckpointTs: 449545330860752934
SinkURI: kafka://127.0.0.1:9092/ticdc-kafka-compression-zstd-test?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1&compression=zstd
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
start tidb cluster in /tmp/tidb_cdc_test/kafka_big_messages_v2
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Starting TLS TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying TLS TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
table foreign_key.finish_mark not exists for 8-th check, retry later
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:22:34 CST 2024] <<<<<< run test case kafka_compression success! >>>>>>
table foreign_key.finish_mark not exists for 9-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table foreign_key.finish_mark not exists for 10-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 19.75 secs (188584985 bytes/sec)
[Pipeline] {
[Pipeline] cache
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1991940010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:3544, start at 2024-05-05 11:22:38.069944904 +0800 CST m=+5.316480397	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:38.079 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:38.053 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:38.053 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
+ pd_host=127.0.0.1
+ pd_port=2579
+ is_tls=true
+ '[' true == true ']'
++ run_cdc_cli tso query --pd=https://127.0.0.1:2579
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3618.out cli tso query --pd=https://127.0.0.1:2579
table row_format.finish_mark not exists for 1-th check, retry later
table row_format.finish_mark not exists for 2-th check, retry later
table foreign_key.finish_mark not exists for 11-th check, retry later
table row_format.finish_mark not exists for 3-th check, retry later
+ set +x
+ tso='449545334752018433
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545334752018433 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:22:40 CST 2024] <<<<<< START cdc server in cli_tls_with_auth case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates ']'
+ curl_status_cmd='curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.36663668.out server --log-file /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_cli_tls_with_auth_tls1.log --log-level debug --data-dir /tmp/tidb_cdc_test/cli_tls_with_auth/cdc_data_cli_tls_with_auth_tls1 --cluster-id default --config /tmp/tidb_cdc_test/cli_tls_with_auth/server.toml --ca /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem --cert-allowed-cn client --addr 127.0.0.1:8300 --pd https://127.0.0.1:2579
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_manager/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table foreign_key.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:22:42 CST 2024] <<<<<< run test case foreign_key success! >>>>>>
table row_format.finish_mark not exists for 4-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/ddl_manager
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ (( i++ ))
+ (( i <= 50 ))
++ curl --cacert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --user ticdc:ticdc_secret -vsL --max-time 20 https://127.0.0.1:8300/debug/info
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Initializing NSS with certpath: sql:/etc/pki/nssdb
*   CAfile: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem
  CApath: none
* NSS: client certificate from file
* 	subject: CN=client
* 	start date: Feb 18 07:48:00 2020 GMT
* 	expire date: Jan 25 07:48:00 2120 GMT
* 	common name: client
* 	issuer: CN=My own CA,O=PingCAP,L=Beijing,ST=Beijing,C=CN
* SSL connection using TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
* Server certificate:
* 	subject: CN=tidb-server
* 	start date: Feb 18 09:11:00 2020 GMT
* 	expire date: Jan 25 09:11:00 2120 GMT
* 	common name: tidb-server
* 	issuer: CN=My own CA,O=PingCAP,L=Beijing,ST=Beijing,C=CN
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:22:44 GMT
< Content-Length: 1233
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/2f0012bc-0312-4b7d-9e7f-f5c2d962b306
	{"id":"2f0012bc-0312-4b7d-9e7f-f5c2d962b306","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879361}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/1f5d8f46c642f282
	2f0012bc-0312-4b7d-9e7f-f5c2d962b306

/tidb/cdc/default/default/upstream/7365350725032055970
	{"id":7365350725032055970,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/2f0012bc-0312-4b7d-9e7f-f5c2d962b306
	{"id":"2f0012bc-0312-4b7d-9e7f-f5c2d962b306","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879361}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/1f5d8f46c642f282
	2f0012bc-0312-4b7d-9e7f-f5c2d962b306

/tidb/cdc/default/default/upstream/7365350725032055970
	{"id":7365350725032055970,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/2f0012bc-0312-4b7d-9e7f-f5c2d962b306
	{"id":"2f0012bc-0312-4b7d-9e7f-f5c2d962b306","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879361}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/1f5d8f46c642f282
	2f0012bc-0312-4b7d-9e7f-f5c2d962b306

/tidb/cdc/default/default/upstream/7365350725032055970
	{"id":7365350725032055970,"pd-endpoints":"https://127.0.0.1:2579,https://127.0.0.1:2579","key-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server-key.pem","cert-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/server.pem","ca-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem","cert-allowed-cn":["client","tidb-server"]}'
+ grep -q 'etcd info'
+ break
+ set +x
table row_format.finish_mark not exists for 5-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b19dff00011	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:2059, start at 2024-05-05 11:22:43.086978761 +0800 CST m=+5.328901887	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:43.093 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:43.068 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:43.068 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b19dff00011	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:2059, start at 2024-05-05 11:22:43.086978761 +0800 CST m=+5.328901887	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:43.093 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:43.068 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:43.068 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b19dfe40015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:2148, start at 2024-05-05 11:22:43.105300227 +0800 CST m=+5.282789283	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:43.114 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:43.115 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:43.115 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_big_messages_v2/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Verifying downstream PD is started...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_messages/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:22:45 CST 2024] <<<<<< run test case kafka_messages success! >>>>>>
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3734.out cli changefeed create --start-ts=449545334752018433 '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name
[WARN] --tz is deprecated in changefeed settings.
table row_format.finish_mark not exists for 6-th check, retry later
Create changefeed successfully!
ID: custom-changefeed-name
Info: {"upstream_id":7365350725032055970,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:22:46.891973055+08:00","start_ts":449545334752018433,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545334752018433,"checkpoint_ts":449545334752018433,"checkpoint_time":"2024-05-05 11:22:39.253"}
PASS
[Sun May  5 11:22:47 CST 2024] <<<<<< START cdc server in kafka_big_messages_v2 case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_big_messages_v2.36373639.out server --log-file /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 7.94 secs (468786319 bytes/sec)
[Pipeline] {
[Pipeline] cache
+ set +x
[Sun May  5 11:22:48 CST 2024] <<<<<< START kafka consumer in cli_tls_with_auth case >>>>>>
table test.simple not exists for 1-th check, retry later
table row_format.finish_mark not exists for 7-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_sink_error_resume/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:22:50 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0bf9daaf-2322-42e6-bfd9-1baedccf5fd4
	{"id":"0bf9daaf-2322-42e6-bfd9-1baedccf5fd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879367}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c64b6fc7
	0bf9daaf-2322-42e6-bfd9-1baedccf5fd4

/tidb/cdc/default/default/upstream/7365350743671689316
	{"id":7365350743671689316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0bf9daaf-2322-42e6-bfd9-1baedccf5fd4
	{"id":"0bf9daaf-2322-42e6-bfd9-1baedccf5fd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879367}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c64b6fc7
	0bf9daaf-2322-42e6-bfd9-1baedccf5fd4

/tidb/cdc/default/default/upstream/7365350743671689316
	{"id":7365350743671689316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0bf9daaf-2322-42e6-bfd9-1baedccf5fd4
	{"id":"0bf9daaf-2322-42e6-bfd9-1baedccf5fd4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879367}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c64b6fc7
	0bf9daaf-2322-42e6-bfd9-1baedccf5fd4

/tidb/cdc/default/default/upstream/7365350743671689316
	{"id":7365350743671689316,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: 5204c0da-6b89-485a-8039-d8c712420ec2
Info: {"upstream_id":7365350743671689316,"namespace":"default","id":"5204c0da-6b89-485a-8039-d8c712420ec2","sink_uri":"kafka://127.0.0.1:9092/big-message-test?protocol=open-protocol\u0026partition-num=1\u0026kafka-version=2.4.1\u0026max-message-bytes=12582912","create_time":"2024-05-05T11:22:50.348381576+08:00","start_ts":449545336787566593,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545336787566593,"checkpoint_ts":449545336787566593,"checkpoint_time":"2024-05-05 11:22:47.018"}
[Sun May  5 11:22:50 CST 2024] <<<<<< START kafka consumer in kafka_big_messages_v2 case >>>>>>
Starting generate kafka big messages...
table test.simple not exists for 2-th check, retry later
table row_format.finish_mark not exists for 8-th check, retry later
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading go.uber.org/atomic v1.11.0
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/kafka_sink_error_resume
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.simple exists
table test.`simple-dash` exists
+ endpoints=https://127.0.0.1:2579
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates
+ [[ https://127.0.0.1:2579 =~ https ]]
++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545335171448838,
  "checkpoint_time": "2024-05-05 11:22:40.853",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545335171448838,
  "checkpoint_time": "2024-05-05 11:22:40.853",
  "error": null
}'
{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545335171448838,
  "checkpoint_time": "2024-05-05 11:22:40.853",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545335171448838, '"checkpoint_time":' '"2024-05-05' '11:22:40.853",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545335171448838, '"checkpoint_time":' '"2024-05-05' '11:22:40.853",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
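
The trace above is the changefeed state check: the harness queries the changefeed summary over TLS with `cdc cli changefeed query -s` and asserts on the .state and .error.message fields with jq. A sketch of that check, with the certificate paths, PD endpoint, and changefeed id copied from the trace and the error handling simplified:

# Sketch of the state check traced above.
CERT_DIR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates
info=$(cdc cli changefeed query \
    --ca="$CERT_DIR/ca.pem" --cert="$CERT_DIR/client.pem" --key="$CERT_DIR/client-key.pem" \
    --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s)
state=$(echo "$info" | jq -r .state)
message=$(echo "$info" | jq -r .error.message)
if [[ "$state" != "normal" ]]; then
    echo "unexpected changefeed state: $state (error: $message)" >&2
    exit 1
fi
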
changefeed count 1 check pass, pd_addr: https://127.0.0.1:2579
table row_format.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 2-th time...
Error: [CDC:ErrChangefeedUpdateRefused]changefeed update error: can only update changefeed config when it is stopped or failed
update changefeed config should fail when changefeed is running, got Diff of changefeed config:
{Type:update Path:[Config CaseSensitive] From:false To:true}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc003d8f940}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc003d8f948}
{Type:update Path:[Config Consistent] From:<nil> To:0xc001306cb0}
{Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true}
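
The ErrChangefeedUpdateRefused error above is the expected outcome: the config can only be updated while the changefeed is stopped or failed, which is why the next trace line pauses it. A sketch of the pause-then-update flow implied here; only the pause appears in the adjacent trace, the concrete update arguments are not shown in this log and are left as a placeholder, and the TLS flags (--ca/--cert/--key) seen in the earlier query trace are omitted for brevity:

# Sketch only: stop the changefeed before changing its config, then resume it.
cdc cli changefeed pause  --pd=https://127.0.0.1:2579 -c custom-changefeed-name
# ... apply the config change with `cdc cli changefeed update` once the feed is stopped ...
cdc cli changefeed resume --pd=https://127.0.0.1:2579 -c custom-changefeed-name
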
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.3990.out cli changefeed --changefeed-id custom-changefeed-name pause
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:22:54 CST 2024] <<<<<< run test case row_format success! >>>>>>
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_puller_lag/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1a8e200017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:4012, start at 2024-05-05 11:22:54.256625081 +0800 CST m=+5.074126733	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:54.263 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:54.266 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:54.266 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1a8e200017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:4012, start at 2024-05-05 11:22:54.256625081 +0800 CST m=+5.074126733	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:54.263 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:54.266 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:54.266 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1a8f980014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:4084, start at 2024-05-05 11:22:54.355665095 +0800 CST m=+5.124672599	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:24:54.362 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:22:54.361 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:12:54.361 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
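For context, the harness keeps querying the bootstrap metadata dumped above until both TiDB instances answer. A minimal sketch of such a readiness poll, assuming a local mysql client; the host and port values are illustrative and not taken from this log:

# Hedged sketch: poll a TiDB endpoint until it serves the mysql.tidb bootstrap table.
wait_tidb_ready() {
    local host=$1 port=$2
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root \
            -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;' >/dev/null 2>&1; then
            echo "TiDB at $host:$port is ready"
            return 0
        fi
        echo "TiDB at $host:$port not ready, retry $i..."
        sleep 1
    done
    return 1
}
wait_tidb_ready 127.0.0.1 4000   # port 4000 is an assumption for illustration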
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/ddl_manager/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_manager/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ set +x
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table kafka_big_messages.test does not exist at the 1st check, retry later
start tidb cluster in /tmp/tidb_cdc_test/ddl_puller_lag
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
[Sun May  5 11:22:57 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.54115413.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
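The trace above is the standard readiness loop for the cdc server: it retries the authenticated /debug/info endpoint until the response contains "etcd info" or 50 attempts are exhausted. A condensed sketch of the same loop, following the commands shown in the trace:

# Retry the debug endpoint until the cdc server reports its etcd info.
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>/dev/null)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo 'cdc server reported "failed to get info", aborting'; exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        echo 'cdc server is up'; break
    fi
    if [ "$i" -eq 50 ]; then
        echo 'cdc server did not become ready in time'; exit 1
    fi
    sleep 3
done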
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
table kafka_big_messages.test exists
Release Version: v8.2.0-alpha-14-g1679dbca2
3723625472 bytes in 8.37 secs (444621369 bytes/sec)
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[Pipeline] {
[Pipeline] cache
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
check diff failed for the 1st time, retry later
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ endpoints=https://127.0.0.1:2579
+ changefeed_id=custom-changefeed-name
+ expected_state=stopped
+ error_msg=null
+ tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates
+ [[ https://127.0.0.1:2579 =~ https ]]
++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545338566213637,
  "checkpoint_time": "2024-05-05 11:22:53.803",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545338566213637,
  "checkpoint_time": "2024-05-05 11:22:53.803",
  "error": null
}'
{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545338566213637,
  "checkpoint_time": "2024-05-05 11:22:53.803",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449545338566213637, '"checkpoint_time":' '"2024-05-05' '11:22:53.803",' '"error":' null '}'
++ jq -r .state
+ state=stopped
+ [[ ! stopped == \s\t\o\p\p\e\d ]]
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449545338566213637, '"checkpoint_time":' '"2024-05-05' '11:22:53.803",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
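The block above verifies a changefeed over TLS: it queries the feed by id, then asserts on the reported state and error message with jq. A hedged sketch of that check as a reusable function (the function and argument names are illustrative, not the harness's own helper):

# Query a changefeed via the TLS PD endpoint and assert on state and error message.
check_changefeed_state() {
    local pd=$1 id=$2 expected_state=$3 expected_error=$4 tls_dir=$5
    local info state message
    info=$(cdc cli changefeed query \
        --ca="$tls_dir/ca.pem" --cert="$tls_dir/client.pem" --key="$tls_dir/client-key.pem" \
        --pd="$pd" -c "$id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "unexpected state: $state (want $expected_state)"; return 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "unexpected error message: $message (want $expected_error)"; return 1
    fi
}
# e.g. check_changefeed_state https://127.0.0.1:2579 custom-changefeed-name stopped null "$tls_dir"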
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4076.out cli changefeed update --pd=https://127.0.0.1:2579 --config=/tmp/tidb_cdc_test/cli_tls_with_auth/changefeed.toml --no-confirm --changefeed-id custom-changefeed-name
Verifying downstream PD is started...
Diff of changefeed config:
{Type:update Path:[Config CaseSensitive] From:false To:true}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc00198d708}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc00198d718}
{Type:update Path:[Config Consistent] From:<nil> To:0xc001b388c0}
{Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true}
Update changefeed config successfully! 
ID: custom-changefeed-name
Info: {"upstream_id":7365350725032055970,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:22:46.891973055+08:00","start_ts":449545334752018433,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":true,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":true,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545338566213637,"checkpoint_time":"2024-05-05 11:22:53.803"}
PASS
coverage: 2.8% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:00 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf36572a-38f4-4c4a-9d7a-20228771b3e9
	{"id":"bf36572a-38f4-4c4a-9d7a-20228771b3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879377}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679e8cc
	bf36572a-38f4-4c4a-9d7a-20228771b3e9

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf36572a-38f4-4c4a-9d7a-20228771b3e9
	{"id":"bf36572a-38f4-4c4a-9d7a-20228771b3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879377}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679e8cc
	bf36572a-38f4-4c4a-9d7a-20228771b3e9

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf36572a-38f4-4c4a-9d7a-20228771b3e9
	{"id":"bf36572a-38f4-4c4a-9d7a-20228771b3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879377}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679e8cc
	bf36572a-38f4-4c4a-9d7a-20228771b3e9

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.cli.5468.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-manager
wait process cdc.test exit for 1-th time...
Create changefeed successfully!
ID: ddl-manager
Info: {"upstream_id":7365350789571968181,"namespace":"default","id":"ddl-manager","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:01.051782515+08:00","start_ts":449545340430843907,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545340430843907,"checkpoint_ts":449545340430843907,"checkpoint_time":"2024-05-05 11:23:00.916"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
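The changefeed created above encodes all Kafka sink settings in the URI query string (protocol, partition count, broker version, maximum message size). A minimal sketch of the same call, with an illustrative topic name and changefeed id:

# Create a Kafka changefeed; the sink URI parameters mirror the ones used above.
SINK_URI='kafka://127.0.0.1:9092/ticdc-example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
cdc cli changefeed create --pd=http://127.0.0.1:2379 --sink-uri="$SINK_URI" -c example-changefeed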
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
wait process cdc.test exit for 2-th time...
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4111.out cli changefeed --changefeed-id custom-changefeed-name resume
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:23:01 CST 2024] <<<<<< run test case kafka_big_messages_v2 success! >>>>>>
PASS
+ set +x
[Sun May  5 11:23:02 CST 2024] <<<<<< START kafka consumer in ddl_manager case >>>>>>
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b1e880008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:9396, start at 2024-05-05 11:23:03.465438956 +0800 CST m=+5.167135736	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:03.472 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:03.458 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:03.458 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/drop_many_tables/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
wait process 5416 exit for 1-th time...
wait process 5416 exit for 2-th time...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b1e880008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:9396, start at 2024-05-05 11:23:03.465438956 +0800 CST m=+5.167135736	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:03.472 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:03.458 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:03.458 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b1f340016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:9481, start at 2024-05-05 11:23:03.520205708 +0800 CST m=+5.170729337	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:03.526 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:03.501 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:03.501 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_sink_error_resume/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process 5416 exit for 3-th time...
+ endpoints=https://127.0.0.1:2579
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates
+ [[ https://127.0.0.1:2579 =~ https ]]
++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545341725048834,
  "checkpoint_time": "2024-05-05 11:23:05.853",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545341725048834,
  "checkpoint_time": "2024-05-05 11:23:05.853",
  "error": null
}'
{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545341725048834,
  "checkpoint_time": "2024-05-05 11:23:05.853",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545341725048834, '"checkpoint_time":' '"2024-05-05' '11:23:05.853",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545341725048834, '"checkpoint_time":' '"2024-05-05' '11:23:05.853",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4193.out cli changefeed --changefeed-id custom-changefeed-name remove
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process 5416 exit for 4-th time...
wait process 5416 exit for 5-th time...
Changefeed remove successfully.
ID: custom-changefeed-name
CheckpointTs: 449545341987454978
SinkURI: kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
[Sun May  5 11:23:08 CST 2024] <<<<<< START cdc server in kafka_sink_error_resume case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)'
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.1085610858.out server --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
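Note the GO_FAILPOINTS value above: the kafka_sink_error_resume case injects a one-shot error into the Kafka DML producer, so the changefeed is expected to fall into the warning state and then resume. Condensed from the trace, the server under test is started with the failpoint enabled in its environment (the coverage file name is simplified here):

# Enable the failpoint once (1*return(true)) and start the cdc server under test.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/mq/dmlproducer/KafkaSinkAsyncSendError=1*return(true)'
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_sink_error_resume.out server \
    --log-file /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc.log --log-level debug \
    --data-dir /tmp/tidb_cdc_test/kafka_sink_error_resume/cdc_data \
    --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 &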
wait process 5416 exit for 6-th time...
start tidb cluster in /tmp/tidb_cdc_test/drop_many_tables
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
wait process 5416 exit for 7-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5416) - No such process
wait process 5416 exit for 8-th time...
process 5416 already exit
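The kill_cdc_pid helper traced above signals the old cdc process and polls until it is gone; the "No such process" message only means the process had already exited between checks. A hedged sketch of that wait loop:

# Signal a pid and wait until it disappears.
wait_process_exit() {
    local pid=$1
    kill "$pid" 2>/dev/null
    for i in $(seq 1 30); do
        if ! kill -0 "$pid" 2>/dev/null; then
            echo "process $pid already exit"
            return 0
        fi
        echo "wait process $pid exit for $i-th time..."
        sleep 1
    done
    return 1
}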
[Sun May  5 11:23:09 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)'
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.55785580.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b6cf40013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:4120, start at 2024-05-05 11:23:08.506551502 +0800 CST m=+5.198812568	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:08.515 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:08.528 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:08.528 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b6cf40013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:4120, start at 2024-05-05 11:23:08.506551502 +0800 CST m=+5.198812568	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:08.515 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:08.528 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:08.528 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1b6e7c0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:4199, start at 2024-05-05 11:23:08.605277555 +0800 CST m=+5.237595018	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:08.614 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:08.575 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:08.575 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_puller_lag/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:11 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/b4230b92-1eb5-4c1e-8c30-d227d81ba563
	{"id":"b4230b92-1eb5-4c1e-8c30-d227d81ba563","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879388}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c69baacb
	b4230b92-1eb5-4c1e-8c30-d227d81ba563

/tidb/cdc/default/default/upstream/7365350835120156608
	{"id":7365350835120156608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/b4230b92-1eb5-4c1e-8c30-d227d81ba563
	{"id":"b4230b92-1eb5-4c1e-8c30-d227d81ba563","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879388}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c69baacb
	b4230b92-1eb5-4c1e-8c30-d227d81ba563

/tidb/cdc/default/default/upstream/7365350835120156608
	{"id":7365350835120156608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/b4230b92-1eb5-4c1e-8c30-d227d81ba563
	{"id":"b4230b92-1eb5-4c1e-8c30-d227d81ba563","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879388}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c69baacb
	b4230b92-1eb5-4c1e-8c30-d227d81ba563

/tidb/cdc/default/default/upstream/7365350835120156608
	{"id":7365350835120156608,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:23:11 CST 2024] <<<<<< START kafka consumer in kafka_sink_error_resume case >>>>>>
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f warning last_warning kafka sink injected error
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=warning
+ field=last_warning
+ error_pattern=kafka
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
Verifying downstream PD is started...

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    86  100    86    0     0    689      0 --:--:-- --:--:-- --:--:--   693
+ info='{"state":"normal","resolved_ts":449545343273271301,"checkpoint_ts":449545343273271301}'
+ echo '{"state":"normal","resolved_ts":449545343273271301,"checkpoint_ts":449545343273271301}'
{"state":"normal","resolved_ts":449545343273271301,"checkpoint_ts":449545343273271301}
++ echo '{"state":"normal","resolved_ts":449545343273271301,"checkpoint_ts":449545343273271301}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \w\a\r\n\i\n\g ]]
+ echo 'changefeed state normal does not equal to warning'
changefeed state normal does not equal to warning
+ exit 1
run task failed 1-th time, retry later
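check_changefeed_status above polls the v2 status API and fails until the injected Kafka error actually flips the changefeed into the warning state; the harness then retries the whole task. A condensed sketch of the state comparison only (the last_warning/error-pattern check is omitted):

# Fetch the changefeed status from the v2 API and compare the reported state.
check_changefeed_status() {
    local endpoint=$1 changefeed_id=$2 expected_state=$3
    local info state
    info=$(curl -s "http://$endpoint/api/v2/changefeeds/$changefeed_id/status")
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        return 1
    fi
}
# retried by the caller, e.g.:
# check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f warning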
changefeed count 0 check pass, pd_addr: https://127.0.0.1:2579
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4272.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[WARN] --tz is deprecated in changefeed settings.
Create changefeed successfully!
ID: custom-changefeed-name
Info: {"upstream_id":7365350725032055970,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-12275?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:13.485187949+08:00","start_ts":449545343664914438,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545343664914438,"checkpoint_ts":449545343664914438,"checkpoint_time":"2024-05-05 11:23:13.253"}
PASS
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5642.out cli tso query --pd=http://127.0.0.1:2379
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f warning last_warning kafka sink injected error
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=warning
+ field=last_warning
+ error_pattern=kafka
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    86  100    86    0     0    784      0 --:--:-- --:--:-- --:--:--   788
+ info='{"state":"normal","resolved_ts":449545343351914523,"checkpoint_ts":449545343351914523}'
+ echo '{"state":"normal","resolved_ts":449545343351914523,"checkpoint_ts":449545343351914523}'
{"state":"normal","resolved_ts":449545343351914523,"checkpoint_ts":449545343351914523}
++ echo '{"state":"normal","resolved_ts":449545343351914523,"checkpoint_ts":449545343351914523}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \w\a\r\n\i\n\g ]]
+ echo 'changefeed state normal does not equal to warning'
changefeed state normal does not equal to warning
+ exit 1
run task failed 2-th time, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
+ set +x
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ tso='449545343854968834
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545343854968834 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
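The tso query above returns the timestamp on its first line, followed by the test binary's PASS and coverage lines; the script collapses the output and keeps only the first whitespace-separated field. In short (the coverage file path is illustrative):

# Extract the TSO from the cli output, discarding the trailing PASS/coverage noise.
tso=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.out cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso | awk -F ' ' '{print $1}')   # e.g. 449545343854968834
echo "start_ts=$start_ts"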
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:16 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340850274307 MinTableBarrierTs:449545340850274307 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340850274307 MinTableBarrierTs:449545340850274307 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340850274307 MinTableBarrierTs:449545340850274307 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:23:16 CST 2024] <<<<<< START cdc server in ddl_manager case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteDDLSlowly=return(true)'
+ (( i = 0 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_manager.56365638.out server --log-file /tmp/tidb_cdc_test/ddl_manager/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_manager/cdc_data --cluster-id default
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
[Sun May  5 11:23:16 CST 2024] <<<<<< START cdc server in ddl_puller_lag case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)'
+ (( i = 0 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.56965698.out server --log-file /tmp/tidb_cdc_test/ddl_puller_lag/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_puller_lag/cdc_data --cluster-id default
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
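Note: the two "START cdc server" blocks above (ddl_manager and ddl_puller_lag) follow the harness's standard server-readiness pattern: start cdc.test server with a failpoint injected through the GO_FAILPOINTS environment variable, then poll http://127.0.0.1:8300/debug/info with basic auth up to 50 times, treating a response that contains 'etcd info' as success and sleeping 3 seconds between attempts. A minimal bash sketch of that loop, reconstructed from the trace (variable names and the handling of the failure marker are illustrative, not the harness's exact source):

    # Reconstructed readiness poll; logic follows the trace above, names are illustrative.
    GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorDDLResolved=1*sleep(180000)' \
        cdc.test server --log-file "$LOG_FILE" --log-level debug \
        --data-dir "$DATA_DIR" --cluster-id default &

    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        echo "$res" | grep -q 'failed to get info:' && echo "server reported an error"   # exact handling here is assumed
        if echo "$res" | grep -q 'etcd info'; then
            break                                    # the capture has registered itself in etcd
        fi
        ((i == 50)) && { echo "cdc server failed to start in time"; exit 1; }
        sleep 3
    done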
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_tables_ddl_v2/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/multi_tables_ddl_v2
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ endpoints=https://127.0.0.1:2579
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates
+ [[ https://127.0.0.1:2579 =~ https ]]
++ cdc cli changefeed query --ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem --cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem --key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem --pd=https://127.0.0.1:2579 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545344870776835,
  "checkpoint_time": "2024-05-05 11:23:17.853",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545344870776835,
  "checkpoint_time": "2024-05-05 11:23:17.853",
  "error": null
}'
{
  "upstream_id": 7365350725032055970,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545344870776835,
  "checkpoint_time": "2024-05-05 11:23:17.853",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545344870776835, '"checkpoint_time":' '"2024-05-05' '11:23:17.853",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365350725032055970, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545344870776835, '"checkpoint_time":' '"2024-05-05' '11:23:17.853",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
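Note: the trace just above is the changefeed-state assertion used against the TLS PD endpoint (https://127.0.0.1:2579): it runs `cdc cli changefeed query -s`, then pulls `.state` and `.error.message` out of the JSON with jq and compares them with the expected values (here the state must be normal and the error message must match "null"). A condensed sketch of that check; the function name is illustrative, and the TLS flags are added only when the PD endpoint is https, as in the trace:

    # Condensed reconstruction of the changefeed-state check traced above (function name is illustrative).
    check_changefeed_state() {
        local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4 tls_dir=$5
        local tls_args="" info state message
        if [[ "$endpoints" =~ https ]]; then
            tls_args="--ca=$tls_dir/ca.pem --cert=$tls_dir/client.pem --key=$tls_dir/client-key.pem"
        fi
        info=$(cdc cli changefeed query $tls_args --pd="$endpoints" -c "$changefeed_id" -s)
        state=$(echo "$info" | jq -r .state)
        [[ "$state" == "$expected_state" ]] || { echo "unexpected state: $state"; return 1; }
        message=$(echo "$info" | jq -r .error.message)
        [[ "$message" =~ $error_msg ]] || { echo "unexpected error message: $message"; return 1; }
    }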
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f warning last_warning kafka sink injected error
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=warning
+ field=last_warning
+ error_pattern=kafka
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   244  100   244    0     0   2216      0 --:--:-- --:--:-- --:--:--  2238
+ info='{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
+ echo '{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}
++ echo '{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \w\a\r\n\i\n\g ]]
+ [[ -z last_warning ]]
++ echo '{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}'
++ jq -r .last_warning.message
+ error_msg='kafka sink injected error'
+ [[ ! kafka sink injected error =~ kafka ]]
run task successfully
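Note: check_changefeed_status, by contrast, goes through the server's open API rather than the CLI: it fetches /api/v2/changefeeds/<id>/status, asserts the state field, and, when a field/pattern pair is supplied, also checks that field's message. (In the call above the pattern argument is passed unquoted, so only "kafka" actually lands in error_pattern; the remaining words are dropped, which the trace confirms.) A sketch consistent with the trace; this is a reconstruction, not the helper's exact source:

    # Reconstruction of check_changefeed_status based on the trace above.
    check_changefeed_status() {
        local endpoint=$1 changefeed_id=$2 expected_state=$3 field=$4 error_pattern=$5
        local info state error_msg
        info=$(curl -s "$endpoint/api/v2/changefeeds/$changefeed_id/status")
        state=$(echo "$info" | jq -r .state)
        if [[ "$state" != "$expected_state" ]]; then
            echo "changefeed state $state does not equal to $expected_state"
            return 1
        fi
        if [[ -n "$field" ]]; then
            error_msg=$(echo "$info" | jq -r ".$field.message")
            [[ "$error_msg" =~ $error_pattern ]] || { echo "unexpected $field message: $error_msg"; return 1; }
        fi
    }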
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 18.34 secs (203057237 bytes/sec)
[Pipeline] {
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4384.out cli changefeed create --start-ts=449545334752018433 '--sink-uri=kafka://127.0.0.1:9093/ticdc-cli-test-ssl-13037?protocol=open-protocol&ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem&cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem&key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true' --tz=Asia/Shanghai
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] sh
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f normal
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=normal
+ field=
+ error_pattern=
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   244  100   244    0     0   1844      0 --:--:-- --:--:-- --:--:--  1834
100   244  100   244    0     0   1843      0 --:--:-- --:--:-- --:--:--  1834
+ info='{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
+ echo '{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}
++ echo '{"state":"warning","resolved_ts":449545344741015555,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \n\o\r\m\a\l ]]
+ echo 'changefeed state warning does not equal to normal'
changefeed state warning does not equal to normal
+ exit 1
run task failed 1-th time, retry later
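Note: the "run task failed N-th time, retry later" lines come from a retry wrapper around these checks; the same status assertion is re-run until the changefeed recovers from the injected Kafka sink error (it is still in the warning state here, so the check keeps failing). A minimal sketch of such a wrapper; the function name, retry budget, and sleep interval are assumptions, not taken from the log:

    # Illustrative retry wrapper; the name, retry count, and backoff are assumptions.
    run_with_retry() {
        local max=$1; shift
        for ((n = 1; n <= max; n++)); do
            if "$@"; then
                echo "run task successfully"
                return 0
            fi
            echo "run task failed $n-th time, retry later"
            sleep 2
        done
        return 1
    }

    # e.g. run_with_retry 30 check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f normal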
[WARN] --tz is deprecated in changefeed settings.
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Create changefeed successfully!
ID: 6925e140-ccef-4c12-9e2f-e26ff2068fa5
Info: {"upstream_id":7365350725032055970,"namespace":"default","id":"6925e140-ccef-4c12-9e2f-e26ff2068fa5","sink_uri":"kafka://127.0.0.1:9093/ticdc-cli-test-ssl-13037?protocol=open-protocol\u0026ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem\u0026cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem\u0026key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760\u0026insecure-skip-verify=true","create_time":"2024-05-05T11:23:19.726155936+08:00","start_ts":449545334752018433,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545334752018433,"checkpoint_ts":449545334752018433,"checkpoint_time":"2024-05-05 11:22:39.253"}
PASS
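Note: the changefeed created above points the sink at 127.0.0.1:9093 and carries the TLS material directly in the sink URI query string: ca, cert, and key reference the test certificates, insecure-skip-verify=true disables certificate verification, and protocol/kafka-version/max-message-bytes configure the encoder and producer (the --tz flag is accepted but reported as deprecated, per the warning above). A sketch of the command shape, with the long certificate paths folded into variables; $TESTS_DIR stands in for the full workspace path printed in the log:

    # Shape of the TLS Kafka changefeed creation traced above; $TESTS_DIR is a stand-in, not a harness variable.
    CERT_DIR=$TESTS_DIR/integration_tests/_certificates
    SINK_URI="kafka://127.0.0.1:9093/ticdc-cli-test-ssl-13037?protocol=open-protocol"
    SINK_URI+="&ca=$CERT_DIR/ca.pem&cert=$CERT_DIR/client.pem&key=$CERT_DIR/client-key.pem"
    SINK_URI+="&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true"
    cdc cli changefeed create --start-ts=449545334752018433 --sink-uri="$SINK_URI" --tz=Asia/Shanghai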
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:19 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9efff0b4-c282-4ba3-a59e-e1f5cec10061
	{"id":"9efff0b4-c282-4ba3-a59e-e1f5cec10061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879396}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6b109f9
	9efff0b4-c282-4ba3-a59e-e1f5cec10061

/tidb/cdc/default/default/upstream/7365350846823046665
	{"id":7365350846823046665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9efff0b4-c282-4ba3-a59e-e1f5cec10061
	{"id":"9efff0b4-c282-4ba3-a59e-e1f5cec10061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879396}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6b109f9
	9efff0b4-c282-4ba3-a59e-e1f5cec10061

/tidb/cdc/default/default/upstream/7365350846823046665
	{"id":7365350846823046665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9efff0b4-c282-4ba3-a59e-e1f5cec10061
	{"id":"9efff0b4-c282-4ba3-a59e-e1f5cec10061","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879396}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6b109f9
	9efff0b4-c282-4ba3-a59e-e1f5cec10061

/tidb/cdc/default/default/upstream/7365350846823046665
	{"id":7365350846823046665,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
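Note: the /debug/info responses captured repeatedly in this log always contain the same three sections: "*** owner info ***", "*** processors info ***" (populated once a changefeed is actually being processed, as in the ddl-manager dumps above), and "*** etcd info ***", which lists the capture registration, the owner election key, the changefeed info and status keys, the per-capture task positions, and the upstream PD registration. If only the etcd keys are of interest, something like the following filters them out of the dump; this is a convenience sketch, not part of the harness:

    # Pull just the etcd key lines out of a /debug/info dump (keys start at column 0, values are tab-indented).
    curl -s --user ticdc:ticdc_secret http://127.0.0.1:8300/debug/info \
        | sed -n '/\*\*\* etcd info \*\*\*/,$p' \
        | grep '^/tidb/cdc/'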
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_puller_lag.cli.5750.out cli changefeed create --start-ts=449545343854968834 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-17319?protocol=open-protocol&partition-num=4&kafka-client-id=ddl_puller_lag&kafka-version=2.4.1&max-message-bytes=10485760'
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
Verifying downstream PD is started...
Create changefeed successfully!
ID: 36caf3e3-526b-4cb4-9e91-656cae2e74a1
Info: {"upstream_id":7365350846823046665,"namespace":"default","id":"36caf3e3-526b-4cb4-9e91-656cae2e74a1","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-ddl-puller-lag-test-17319?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=ddl_puller_lag\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:20.072631652+08:00","start_ts":449545343854968834,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545343854968834,"checkpoint_ts":449545343854968834,"checkpoint_time":"2024-05-05 11:23:13.978"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[Pipeline] sh
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Pipeline] sh
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4427.out cli unsafe delete-service-gc-safepoint
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:20 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340863381540 MinTableBarrierTs:449545340863381540 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340863381540 MinTableBarrierTs:449545340863381540 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ grep -q 'etcd info'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-manager
{UpstreamID:7365350789571968181 Namespace:default ID:ddl-manager SinkURI:kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:23:01.051782515 +0800 CST StartTs:449545340430843907 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038745a0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545340457320453}
{CheckpointTs:449545340863381540 MinTableBarrierTs:449545340863381540 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dfdcd6a6-6edc-419f-8c30-605306edd3e9
	{"id":"dfdcd6a6-6edc-419f-8c30-605306edd3e9","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879390}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c679eb59
	dfdcd6a6-6edc-419f-8c30-605306edd3e9

/tidb/cdc/default/default/changefeed/info/ddl-manager
	{"upstream-id":7365350789571968181,"namespace":"default","changefeed-id":"ddl-manager","sink-uri":"kafka://127.0.0.1:9092/ticdc-ddl-mamager-test-897?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:23:01.051782515+08:00","start-ts":449545340430843907,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545340457320453}

/tidb/cdc/default/default/changefeed/status/ddl-manager
	{"checkpoint-ts":449545340863381540,"min-table-barrier-ts":449545340863381540,"admin-job-type":0}

/tidb/cdc/default/default/task/position/dfdcd6a6-6edc-419f-8c30-605306edd3e9/ddl-manager
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365350789571968181
	{"id":7365350789571968181,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
table ddl_manager.finish_mark not exists for 1-th check, retry later
[Pipeline] sh
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Confirm that you know what this command will do and use it at your own risk [Y/N]
CDC service GC safepoint truncated in PD!
PASS
+ set +x
[Sun May  5 11:23:21 CST 2024] <<<<<< START kafka consumer in ddl_puller_lag case >>>>>>
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
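Note: the block above is the Kafka consumer's readiness gate (it repeats below once per parallel shard): it waits for ZooKeeper on port 2181 and Kafka on port 9092 to accept TCP connections, then issues ZooKeeper's four-letter "dump" command over nc and looks for the /brokers/ids/1 ephemeral node to confirm that broker 1 has registered. The trace only shows a single successful pass of each step, so the loop structure below is an assumption:

    # Readiness gate reconstructed from the traced commands; the retry loops are assumed.
    echo "Waiting for zookeeper to be ready..."
    until nc -z localhost 2181; do sleep 1; done
    echo "Waiting for kafka to be ready..."
    until nc -z localhost 9092; do sleep 1; done
    echo "Waiting for kafka-broker to be ready..."
    until echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1; do
        sleep 1
    done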
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
[Pipeline] sh
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f normal
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=normal
+ field=
+ error_pattern=
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   244  100   244    0     0   2199      0 --:--:-- --:--:-- --:--:--  2218
+ info='{"state":"warning","resolved_ts":449545345527709698,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
+ echo '{"state":"warning","resolved_ts":449545345527709698,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
{"state":"warning","resolved_ts":449545345527709698,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}
++ echo '{"state":"warning","resolved_ts":449545345527709698,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \n\o\r\m\a\l ]]
+ echo 'changefeed state warning does not equal to normal'
changefeed state warning does not equal to normal
+ exit 1
run task failed 2-th time, retry later
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
table ddl_manager.finish_mark not exists for 2-th check, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4461.out cli unsafe reset --no-confirm --pd=https://127.0.0.1:2579
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1c25f40014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:4153, start at 2024-05-05 11:23:20.353508377 +0800 CST m=+5.211625222	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:20.359 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:20.317 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:20.317 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1c25f40014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:4153, start at 2024-05-05 11:23:20.353508377 +0800 CST m=+5.211625222	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:20.359 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:20.317 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:20.317 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1c25180015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:4228, start at 2024-05-05 11:23:20.306563727 +0800 CST m=+5.116193226	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:20.314 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:20.312 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:20.312 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
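Note: the VARIABLE_NAME / VARIABLE_VALUE / COMMENT tables above are dumps of the mysql.tidb bootstrap table, printed while the harness waits for the upstream and downstream TiDB instances to come up (the GC leader rows confirm a live GC worker). The exact probe is not shown in the log; something of roughly the following shape would produce that output, with port 4000 assumed as TiDB's default:

    # Assumed shape of the readiness probe behind the tables above; the real query is not in the log.
    mysql -h 127.0.0.1 -P 4000 -u root \
        -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'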
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/drop_many_tables/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/drop_many_tables/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
reset and all metadata truncated in PD!
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table ddl_manager.finish_mark not exists for 3-th check, retry later
+ set +x
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5663.out cli tso query --pd=http://127.0.0.1:2379
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f normal
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=normal
+ field=
+ error_pattern=
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   244  100   244    0     0   2187      0 --:--:-- --:--:-- --:--:--  2198
+ info='{"state":"warning","resolved_ts":449545346838167554,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
+ echo '{"state":"warning","resolved_ts":449545346838167554,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}'
{"state":"warning","resolved_ts":449545346838167554,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka sink injected error"}}
++ echo '{"state":"warning","resolved_ts":449545346838167554,"checkpoint_ts":449545343377866774,"last_warning":{"time":"2024-05-05T11:23:15.997174203+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrProcessorUnknown","message":"kafka' sink injected 'error"}}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \n\o\r\m\a\l ]]
+ echo 'changefeed state warning does not equal to normal'
changefeed state warning does not equal to normal
+ exit 1
run task failed 3-th time, retry later
table ddl_manager.finish_mark not exists for 4-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ tso='449545346945384450
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545346945384450 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
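Note: the snippet above shows how the harness turns `cdc cli tso query` output into a usable --start-ts. Because the instrumented cdc.test binary appends "PASS" and a coverage line to stdout, the script flattens the captured output and keeps only the first whitespace-separated field. A sketch matching the traced commands; the coverage-profile path is shortened here for readability:

    # start-ts extraction as traced above.
    tso=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo $tso | awk -F ' ' '{print $1}')   # $tso deliberately unquoted so newlines collapse to spaces
    echo "$start_ts"                                  # e.g. 449545346945384450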
[Sun May  5 11:23:27 CST 2024] <<<<<< START cdc server in drop_many_tables case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.56985700.out server --log-file /tmp/tidb_cdc_test/drop_many_tables/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/drop_many_tables/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4573.out cli unsafe resolve-lock --region=32
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ca5980009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:6007, start at 2024-05-05 11:23:28.496025356 +0800 CST m=+5.386550820	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:28.505 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:28.486 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:28.486 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 5-th check, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_tls_with_auth.cli.4609.out cli unsafe resolve-lock --region=32 --ts=449545346797535235
PASS
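Note: taken together, the cli_tls_with_auth steps interleaved through this part of the log exercise the `cdc cli unsafe` family: delete-service-gc-safepoint (which prints the "[Y/N]" risk prompt seen above before reporting the safepoint truncated), reset --no-confirm against the TLS PD endpoint, and resolve-lock with and without an explicit --ts. Listed in one place for readability; any TLS or credential flags not visible in the trace are presumably supplied by the harness's defaults:

    # The unsafe-command sequence as traced in this log (flags copied from the trace).
    cdc cli unsafe delete-service-gc-safepoint                      # prints the "[Y/N]" risk prompt seen above
    cdc cli unsafe reset --no-confirm --pd=https://127.0.0.1:2579   # "reset and all metadata truncated in PD!"
    cdc cli unsafe resolve-lock --region=32
    cdc cli unsafe resolve-lock --region=32 --ts=449545346797535235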
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:30 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7e41e158-2c08-4ca1-a60f-ee808871ce4a
	{"id":"7e41e158-2c08-4ca1-a60f-ee808871ce4a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879407}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6dd54d6
	7e41e158-2c08-4ca1-a60f-ee808871ce4a

/tidb/cdc/default/default/upstream/7365350903334789367
	{"id":7365350903334789367,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7e41e158-2c08-4ca1-a60f-ee808871ce4a
	{"id":"7e41e158-2c08-4ca1-a60f-ee808871ce4a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879407}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6dd54d6
	7e41e158-2c08-4ca1-a60f-ee808871ce4a

/tidb/cdc/default/default/upstream/7365350903334789367
	{"id":7365350903334789367,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7e41e158-2c08-4ca1-a60f-ee808871ce4a
	{"id":"7e41e158-2c08-4ca1-a60f-ee808871ce4a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879407}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c6dd54d6
	7e41e158-2c08-4ca1-a60f-ee808871ce4a

/tidb/cdc/default/default/upstream/7365350903334789367
	{"id":7365350903334789367,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.drop_many_tables.cli.5753.out cli changefeed create --start-ts=449545346945384450 '--sink-uri=kafka://127.0.0.1:9092/ticdc-drop-tables-test-31810?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
Create changefeed successfully!
ID: c9a61478-2829-4ac8-b995-76ac502a9fcc
Info: {"upstream_id":7365350903334789367,"namespace":"default","id":"c9a61478-2829-4ac8-b995-76ac502a9fcc","sink_uri":"kafka://127.0.0.1:9092/ticdc-drop-tables-test-31810?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:30.771154422+08:00","start_ts":449545346945384450,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545346945384450,"checkpoint_ts":449545346945384450,"checkpoint_time":"2024-05-05 11:23:25.767"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
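The trace above creates the changefeed through `cdc cli changefeed create` with an explicit `--start-ts` and a Kafka sink URI, and the server echoes back the full changefeed Info. A minimal sketch of how that changefeed could then be inspected from the same shell, assuming the `cdc` binary is on PATH and the server address/flags behave as in current TiCDC releases (not shown verbatim in this log):

# Hypothetical follow-up check, not part of the recorded run.
CF_ID=c9a61478-2829-4ac8-b995-76ac502a9fcc    # ID printed above
cdc cli changefeed query --server=http://127.0.0.1:8300 --changefeed-id="$CF_ID"
# Or hit the v2 HTTP API directly (the endpoint shape appears later in this log):
curl -s "http://127.0.0.1:8300/api/v2/changefeeds/${CF_ID}/status" | jq -r .state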
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
table ddl_manager.finish_mark not exists for 6-th check, retry later
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ca5980009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:6007, start at 2024-05-05 11:23:28.496025356 +0800 CST m=+5.386550820	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:28.505 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:28.486 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:28.486 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ca58c0010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:6088, start at 2024-05-05 11:23:28.497970767 +0800 CST m=+5.335863032	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:28.506 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:28.483 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:28.483 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
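The two variable dumps above are read from the `mysql.tidb` bootstrap/GC table of the upstream and downstream TiDB instances. A sketch of producing the same dump by hand, assuming the upstream TiDB listens on the default test port 4000 (the port is not shown in this excerpt):

# Assumed host/port; the query itself matches the columns shown above.
mysql -h 127.0.0.1 -P 4000 -u root -e \
  'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'
# Or check a single GC setting, e.g. the safe-point retention window:
mysql -h 127.0.0.1 -P 4000 -u root -e \
  'SELECT VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME = "tikv_gc_life_time"'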
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_tables_ddl_v2/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
+ set +x
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   218  100   212  100     6   1608     45 --:--:-- --:--:-- --:--:--  1618
{
    "error_msg": "[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }",
    "error_code": "CDC:ErrAPIInvalidParam"
+ set +x
[Sun May  5 11:23:32 CST 2024] <<<<<< START kafka consumer in drop_many_tables case >>>>>>
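The harness starts its own consumer binary here to replay the open-protocol messages into the downstream cluster. Purely as an illustration (not what the harness runs), the topic created by the changefeed above could be peeked at with Kafka's stock console consumer:

# Illustrative only; topic name and broker address taken from the sink URI above.
TOPIC=ticdc-drop-tables-test-31810
kafka-console-consumer.sh \
  --bootstrap-server 127.0.0.1:9092 \
  --topic "$TOPIC" \
  --from-beginning \
  --max-messages 10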
[Pipeline] }
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
check_changefeed_status 127.0.0.1:8300 22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f normal
+ endpoint=127.0.0.1:8300
+ changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
+ expected_state=normal
+ field=
+ error_pattern=
++ curl 127.0.0.1:8300/api/v2/changefeeds/22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    86  100    86    0     0    768      0 --:--:-- --:--:-- --:--:--   774
+ info='{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}'
+ echo '{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}'
{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}
++ echo '{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
+ [[ -z '' ]]
++ echo '{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}'
++ jq -r .last_error
+ error_msg=null
+ [[ ! null == \n\u\l\l ]]
++ echo '{"state":"normal","resolved_ts":449545348411031555,"checkpoint_ts":449545348411031555}'
++ jq -r .last_warning
+ error_msg=null
+ [[ ! null == \n\u\l\l ]]
+ exit 0
run task successfully
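The expanded trace above is the `check_changefeed_status` helper: it curls the v2 status endpoint for the changefeed and uses `jq` to assert that `state` is `normal` and that `last_error`/`last_warning` are null. Condensed into a sketch using only what the trace shows:

# Condensed form of the check traced above.
endpoint=127.0.0.1:8300
changefeed_id=22cf8a50-82e9-4c9f-a3e4-3982e0bfa03f
info=$(curl -s "http://${endpoint}/api/v2/changefeeds/${changefeed_id}/status")
state=$(echo "$info" | jq -r .state)
[ "$state" = "normal" ] || { echo "unexpected state: $state"; exit 1; }
for field in last_error last_warning; do
  msg=$(echo "$info" | jq -r ".$field")
  [ "$msg" = "null" ] || { echo "$field is not null: $msg"; exit 1; }
done
echo "run task successfully"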
table kafka_sink_error_resume.t1 exists
table kafka_sink_error_resume.t2 exists
check diff successfully
check diff failed 1-th time, retry later
table ddl_manager.finish_mark not exists for 7-th check, retry later
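The "table ... not exists for N-th check, retry later" lines come from a polling helper that waits for a marker table to appear downstream before the case proceeds. A minimal sketch of that pattern, assuming a MySQL-compatible downstream on 127.0.0.1:3306 (the address is not shown in this excerpt):

# Assumed downstream address; retry count and messages mirror the log output.
table=ddl_manager.finish_mark
for i in $(seq 1 60); do
  if mysql -h 127.0.0.1 -P 3306 -u root -e "DESC ${table}" >/dev/null 2>&1; then
    echo "table ${table} exists"
    break
  fi
  echo "table ${table} not exists for ${i}-th check, retry later"
  sleep 2
done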
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 45 min
[Pipeline] {
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
[Sun May  5 11:23:33 CST 2024] <<<<<< START cdc server in multi_tables_ddl_v2 case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_tables_ddl_v2.74627464.out server --log-file /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
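The trace above is the cdc-server readiness loop: after launching `cdc.test ... server`, it polls `/debug/info` with basic auth up to 50 times, treating a response that contains `etcd info` as "ready" and sleeping 3 seconds between attempts. A compact sketch of the same loop, built only from what the trace shows:

# Compact form of the readiness loop traced above.
curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret'
for i in $(seq 0 50); do
  res=$($curl_status_cmd 2>&1)
  echo "$res" | grep -q 'etcd info' && break    # server is up, etcd metadata visible
  echo "$res" | grep -q 'failed to get info:' && echo "server returned an error, retrying"
  [ "$i" -eq 50 ] && { echo "cdc server failed to start"; exit 1; }
  sleep 3
done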
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] withCredentials
Masking supported pattern matches of $TICDC_COVERALLS_TOKEN or $TICDC_CODECOV_TOKEN
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] sh
check diff successfully
wait process cdc.test exit for 1-th time...
table ddl_manager.finish_mark not exists for 8-th check, retry later
[Pipeline] sh
wait process cdc.test exit for 2-th time...
}
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   181  100   181    0     0   1312      0 --:--:-- --:--:-- --:--:--  1321
{
 "version": "v8.2.0-alpha-53-g0de8dc3e4",
 "git_hash": "0de8dc3e43ec741eba58047155ce7f3dba8eb4f7",
 "id": "329e9c8c-7544-4d00-88fa-3e3a4e4e71c4",
 "pid": 3671,
 "is_owner": true
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G06
Run cases: sink_retry changefeed_error ddl_sequence resourcecontrol
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=ce81b722-e58a-4e76-8d5d-116647441594
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G06
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pvtgm
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h pingcap_tiflow_pull_cdc_integration_kafka_test_1855-pvtgm
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_retry/run.sh using Sink-Type: kafka... <<=================
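The lines above show how each parallel leg prepares `/tmp/tidb_cdc_test` and dispatches one case group through `run_group.sh` (here `G06`: sink_retry, changefeed_error, ddl_sequence, resourcecontrol). A sketch of reproducing a single group locally, assuming a tiflow checkout with the test binaries already built under `bin/`:

# Local reproduction sketch; paths and group name taken from the log above.
cd tiflow
rm -rf /tmp/tidb_cdc_test && mkdir -p /tmp/tidb_cdc_test
chmod +x ./tests/integration_tests/run_group.sh
./tests/integration_tests/run_group.sh kafka G06   # sink_retry changefeed_error ddl_sequence resourcecontrol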
The 1 times to try to start tidb cluster...
[Pipeline] sh
[Pipeline] sh
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:23:35 CST 2024] <<<<<< run test case kafka_sink_error_resume success! >>>>>>
}
wait process cdc.test exit for 1-th time...
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G11
Run cases: resolve_lock move_table autorandom generate_column
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=50093113-34c0-4daf-a037-bfcfa1ed70a5
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G11
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-703xz
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-703xz pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resolve_lock/run.sh using Sink-Type: kafka... <<=================
[Pipeline] sh
table drop_tables.c not exists for 1-th check, retry later
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G08
Run cases: processor_err_chan changefeed_reconstruct multi_capture synced_status_with_redo
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=3d22109d-dff7-4d9c-8d3e-096842b92ddf
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G08
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-lf7fd
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f pingcap_tiflow_pull_cdc_integration_kafka_test_1855-lf7fd
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G07
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
[Pipeline] {
wait process cdc.test exit for 2-th time...
[Pipeline] {
[Pipeline] // container
[Pipeline] sh
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G10
Run cases: default_value simple cdc_server_tips event_filter sql_mode
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=c08e89dd-4b14-429b-a783-0bb6f5a20ccb
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G10
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-34lmg
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-34lmg pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/default_value/run.sh using Sink-Type: kafka... <<=================
Run cases: kv_client_stream_reconnect cdc split_region
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=d53780ae-3d81-4f91-a68d-b8eb3edc6e9e
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G07
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-1nvp9
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-1nvp9 pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kv_client_stream_reconnect/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table ddl_manager.finish_mark not exists for 9-th check, retry later
wait process cdc.test exit for 3-th time...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:36 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/86669e2a-1031-4503-9f64-aa1aee1c90ea
	{"id":"86669e2a-1031-4503-9f64-aa1aee1c90ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879413}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7005ece
	86669e2a-1031-4503-9f64-aa1aee1c90ea

/tidb/cdc/default/default/upstream/7365350933670305815
	{"id":7365350933670305815,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/86669e2a-1031-4503-9f64-aa1aee1c90ea
	{"id":"86669e2a-1031-4503-9f64-aa1aee1c90ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879413}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7005ece
	86669e2a-1031-4503-9f64-aa1aee1c90ea

/tidb/cdc/default/default/upstream/7365350933670305815
	{"id":7365350933670305815,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/86669e2a-1031-4503-9f64-aa1aee1c90ea
	{"id":"86669e2a-1031-4503-9f64-aa1aee1c90ea","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879413}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7005ece
	86669e2a-1031-4503-9f64-aa1aee1c90ea

/tidb/cdc/default/default/upstream/7365350933670305815
	{"id":7365350933670305815,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: test-normal
Info: {"upstream_id":7365350933670305815,"namespace":"default","id":"test-normal","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-26771?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:36.822061816+08:00","start_ts":449545348968873985,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t1","multi_tables_ddl_test.t2","multi_tables_ddl_test.t3","multi_tables_ddl_test.t4","multi_tables_ddl_test.t1_7","multi_tables_ddl_test.t2_7","multi_tables_ddl_test.finish_mark"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545348968873985,"checkpoint_ts":449545348968873985,"checkpoint_time":"2024-05-05 11:23:33.486"}
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G09
Run cases: gc_safepoint changefeed_pause_resume cli_with_auth savepoint synced_status
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=f4516b84-ca1f-47af-a284-2137d8ed0b3c
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G09
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3nsj2
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-3nsj2 pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/gc_safepoint/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:23:37 CST 2024] <<<<<< run test case cli_tls_with_auth success! >>>>>>
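The "wait process cdc.test exit for N-th time..." lines come from a shutdown helper that keeps checking whether the `cdc.test` process has exited before the case is declared done. A minimal sketch of that pattern (the helper's exact implementation is not shown in this log):

# Assumed implementation shape; messages mirror the log output.
i=0
while pgrep -x cdc.test >/dev/null 2>&1; do
  i=$((i + 1))
  echo "wait process cdc.test exit for ${i}-th time..."
  sleep 1
done
echo "process cdc.test already exit"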
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
start tidb cluster in /tmp/tidb_cdc_test/sink_retry
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
table drop_tables.c not exists for 2-th check, retry later
Verifying upstream PD is started...
table ddl_manager.finish_mark not exists for 10-th check, retry later
Create changefeed successfully!
ID: test-error-1
Info: {"upstream_id":7365350933670305815,"namespace":"default","id":"test-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-1-10292?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:39.057824086+08:00","start_ts":449545348968873985,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t5","multi_tables_ddl_test.t6","multi_tables_ddl_test.t7","multi_tables_ddl_test.t8"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545348968873985,"checkpoint_ts":449545348968873985,"checkpoint_time":"2024-05-05 11:23:33.486"}
Create changefeed successfully!
ID: test-error-2
Info: {"upstream_id":7365350933670305815,"namespace":"default","id":"test-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-error-2-4688?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:23:39.265058662+08:00","start_ts":449545348968873985,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["multi_tables_ddl_test.t9","multi_tables_ddl_test.t10"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545348968873985,"checkpoint_ts":449545348968873985,"checkpoint_time":"2024-05-05 11:23:33.486"}
[Sun May  5 11:23:39 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>>
[Sun May  5 11:23:39 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>>
[Sun May  5 11:23:39 CST 2024] <<<<<< START kafka consumer in multi_tables_ddl_v2 case >>>>>>
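The three changefeeds above (`test-normal`, `test-error-1`, `test-error-2`) each carry a different `filter.rules` list in their Info JSON. When a changefeed needs more than the default `*.*` filter, the rules are normally supplied through a TOML file passed to `cdc cli changefeed create --config`; the exact config used by this case is not shown in the log, so the following is only a hedged sketch:

# Hedged sketch: create a filtered changefeed from a TOML config (file contents assumed).
cat > /tmp/changefeed-filter.toml <<'EOF'
[filter]
rules = ["multi_tables_ddl_test.t1", "multi_tables_ddl_test.t2"]
EOF
cdc cli changefeed create \
  --server=http://127.0.0.1:8300 \
  --sink-uri="kafka://127.0.0.1:9092/ticdc-multi-tables-ddl-test-normal-26771?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760" \
  --config=/tmp/changefeed-filter.toml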
The 1 times to try to start tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/processor_err_chan
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table drop_tables.c not exists for 3-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/kv_client_stream_reconnect
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table ddl_manager.finish_mark not exists for 11-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table multi_tables_ddl_test.t55 not exists for 1-th check, retry later
table drop_tables.c exists
check diff successfully
table ddl_manager.finish_mark not exists for 12-th check, retry later
wait process cdc.test exit for 1-th time...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
wait process cdc.test exit for 2-th time...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
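The ERROR 2003 lines above (and the identical ones later in this log) come from a readiness loop that polls the TiDB port until the server accepts connections. A minimal sketch of such a poll follows; the host, port, retry budget, and helper name are illustrative assumptions, not taken from the test scripts themselves.

# Poll TiDB until it accepts connections.
# Host/port/retry budget are assumptions for illustration only.
check_tidb_up() {
    local host=${1:-127.0.0.1} port=${2:-4000} retries=${3:-60}
    local i
    for ((i = 0; i < retries; i++)); do
        if mysql -h "$host" -P "$port" -u root -e 'SELECT 1' >/dev/null 2>&1; then
            echo "TiDB is up on $host:$port"
            return 0
        fi
        sleep 1
    done
    echo "TiDB did not come up on $host:$port" >&2
    return 1
}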
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:23:44 CST 2024] <<<<<< run test case drop_many_tables success! >>>>>>
start tidb cluster in /tmp/tidb_cdc_test/default_value
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table multi_tables_ddl_test.t55 not exists for 2-th check, retry later
table ddl_manager.finish_mark not exists for 13-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
start tidb cluster in /tmp/tidb_cdc_test/gc_safepoint
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Attempt 1 to start the tidb cluster...
table multi_tables_ddl_test.t55 not exists for 3-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 14-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_lost_callback/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:23:47 CST 2024] <<<<<< run test case mq_sink_lost_callback success! >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/resolve_lock
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 15-th check, retry later
table multi_tables_ddl_test.t55 not exists for 4-th check, retry later
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 9.76 secs (381693145 bytes/sec)
[Pipeline] {
[Pipeline] cache
table multi_tables_ddl_test.t55 exists
table multi_tables_ddl_test.t66 exists
table multi_tables_ddl_test.t7 exists
table multi_tables_ddl_test.t88 exists
table multi_tables_ddl_test.finish_mark not exists for 1-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 16-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1de0140014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:1297, start at 2024-05-05 11:23:48.659668771 +0800 CST m=+5.205103391	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:48.668 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:48.664 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:48.664 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1de0140014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:1297, start at 2024-05-05 11:23:48.659668771 +0800 CST m=+5.205103391	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:48.668 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:48.664 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:48.664 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1de258000f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:1378, start at 2024-05-05 11:23:48.772162008 +0800 CST m=+5.265672385	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:48.783 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:48.758 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:48.758 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
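The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the contents of the mysql.tidb system table, which the test harness prints while verifying that each TiDB instance has bootstrapped and that the GC leader is running. A hedged one-liner to reproduce such a dump against a local TiDB (address, port, and empty root password are assumptions) would be:

# Dump bootstrap and GC-related variables from the mysql.tidb system table.
# 127.0.0.1:4000 with a passwordless root user is an assumption.
mysql -h 127.0.0.1 -P 4000 -u root --batch \
    -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'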
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/sink_retry/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/sink_retry/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sink_retry/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e01100017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:1402, start at 2024-05-05 11:23:50.767176385 +0800 CST m=+5.129521974	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:50.775 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:50.774 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:50.774 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e01100017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:1402, start at 2024-05-05 11:23:50.767176385 +0800 CST m=+5.129521974	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:50.775 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:50.774 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:50.774 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e02940007	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:1476, start at 2024-05-05 11:23:50.831500789 +0800 CST m=+5.139003115	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:50.837 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:50.821 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:50.821 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/processor_err_chan/tiflash/log/error.log
table multi_tables_ddl_test.finish_mark not exists for 2-th check, retry later
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_err_chan/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e12780013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:1352, start at 2024-05-05 11:23:51.882161646 +0800 CST m=+5.154730484	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:51.890 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:51.888 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:51.888 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e12780013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:1352, start at 2024-05-05 11:23:51.882161646 +0800 CST m=+5.154730484	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:51.890 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:51.888 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:51.888 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e163c000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:1435, start at 2024-05-05 11:23:52.091327432 +0800 CST m=+5.309546816	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:52.098 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:52.079 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:52.079 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kv_client_stream_reconnect/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_manager.finish_mark not exists for 17-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2827.out cli tso query --pd=http://127.0.0.1:2379
table multi_tables_ddl_test.finish_mark exists
check table exists success
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test-normal
+ expected_state=normal
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-normal -s
start tidb cluster in /tmp/tidb_cdc_test/mq_sink_dispatcher
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ info='{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-normal",
  "state": "normal",
  "checkpoint_tso": 449545351262634014,
  "checkpoint_time": "2024-05-05 11:23:42.236",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-normal",
  "state": "normal",
  "checkpoint_tso": 449545351262634014,
  "checkpoint_time": "2024-05-05 11:23:42.236",
  "error": null
}'
{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-normal",
  "state": "normal",
  "checkpoint_tso": 449545351262634014,
  "checkpoint_time": "2024-05-05 11:23:42.236",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449545351262634014, '"checkpoint_time":' '"2024-05-05' '11:23:42.236",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-normal",' '"state":' '"normal",' '"checkpoint_tso":' 449545351262634014, '"checkpoint_time":' '"2024-05-05' '11:23:42.236",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test-error-1
+ expected_state=normal
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-1 -s
+ info='{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-1",
  "state": "normal",
  "checkpoint_tso": 449545354395254788,
  "checkpoint_time": "2024-05-05 11:23:54.186",
  "error": null
}'
+ echo '{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-1",
  "state": "normal",
  "checkpoint_tso": 449545354395254788,
  "checkpoint_time": "2024-05-05 11:23:54.186",
  "error": null
}'
{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-1",
  "state": "normal",
  "checkpoint_tso": 449545354395254788,
  "checkpoint_time": "2024-05-05 11:23:54.186",
  "error": null
}
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449545354395254788, '"checkpoint_time":' '"2024-05-05' '11:23:54.186",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-error-1",' '"state":' '"normal",' '"checkpoint_tso":' 449545354395254788, '"checkpoint_time":' '"2024-05-05' '11:23:54.186",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test-error-2
+ expected_state=failed
+ error_msg=ErrSyncRenameTableFailed
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test-error-2 -s
[Sun May  5 11:23:55 CST 2024] <<<<<< START cdc server in kv_client_stream_reconnect case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/kv/kvClientForceReconnect=return(true)'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kv_client_stream_reconnect.28202822.out server --log-file /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kv_client_stream_reconnect/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
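The curl trace above is the CDC server readiness loop: it requests /debug/info on 127.0.0.1:8300 with the ticdc:ticdc_secret basic-auth credentials used in this run, retries while the server refuses connections, and stops once the response contains "etcd info" (up to 50 attempts, sleeping 3 seconds between them). Condensed into a sketch (the error-detection branch of the original script is omitted for brevity):

# Condensed readiness loop for the CDC server, mirroring the trace above.
ready=0
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
              --user ticdc:ticdc_secret 2>&1 || true)
    if echo "$res" | grep -q 'etcd info'; then
        ready=1
        break
    fi
    sleep 3
done
[[ $ready -eq 1 ]] || { echo "cdc server did not become ready" >&2; exit 1; }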
table ddl_manager.finish_mark not exists for 18-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ info='{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-2",
  "state": "failed",
  "checkpoint_tso": 449545350790774794,
  "checkpoint_time": "2024-05-05 11:23:40.436",
  "error": {
    "time": "2024-05-05T11:23:43.207439725+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSyncRenameTableFailed",
    "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule."
  }
}'
+ echo '{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-2",
  "state": "failed",
  "checkpoint_tso": 449545350790774794,
  "checkpoint_time": "2024-05-05 11:23:40.436",
  "error": {
    "time": "2024-05-05T11:23:43.207439725+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSyncRenameTableFailed",
    "message": "[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule."
  }
}'
{
  "upstream_id": 7365350933670305815,
  "namespace": "default",
  "id": "test-error-2",
  "state": "failed",
  "checkpoint_tso": 449545350790774794,
  "checkpoint_time": "2024-05-05 11:23:40.436",
  "error": {
    "time": "2024-05-05T11:23:43.207439725+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSyncRenameTableFailed",
    "message": "[CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule."
  }
}
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449545350790774794, '"checkpoint_time":' '"2024-05-05' '11:23:40.436",' '"error":' '{' '"time":' '"2024-05-05T11:23:43.207439725+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}'
++ jq -r .state
+ state=failed
+ [[ ! failed == \f\a\i\l\e\d ]]
++ echo '{' '"upstream_id":' 7365350933670305815, '"namespace":' '"default",' '"id":' '"test-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449545350790774794, '"checkpoint_time":' '"2024-05-05' '11:23:40.436",' '"error":' '{' '"time":' '"2024-05-05T11:23:43.207439725+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSyncRenameTableFailed",' '"message":' '"[CDC:ErrSyncRenameTableFailed]table'\''s' old name is not in filter rule, and its new name in filter rule table id ''\''130'\'',' ddl query: '[rename' table t11 to 't9],' 'it'\''s' an unexpected behavior, if you want to replicate this table, please add its old name to filter 'rule."' '}' '}'
++ jq -r .error.message
+ message='[CDC:ErrSyncRenameTableFailed]table'\''s old name is not in filter rule, and its new name in filter rule table id '\''130'\'', ddl query: [rename table t11 to t9], it'\''s an unexpected behavior, if you want to replicate this table, please add its old name to filter rule.'
+ [[ ! [CDC:ErrSyncRenameTableFailed]table's old name is not in filter rule, and its new name in filter rule table id '130', ddl query: [rename table t11 to t9], it's an unexpected behavior, if you want to replicate this table, please add its old name to filter rule. =~ ErrSyncRenameTableFailed ]]
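The three checks traced above (test-normal, test-error-1, test-error-2) all follow the same pattern: query the changefeed in simplified mode with `cdc cli changefeed query ... -s`, then parse the state and error message out of the JSON with jq and compare them against the expected values. Folded into a helper (the function name, argument order, and defaults are illustrative, not the test suite's own helper), it is roughly:

# Assert that a changefeed is in the expected state and that its error
# message matches a pattern. Helper name and defaults are illustrative.
check_changefeed_state() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=${4:-null}
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ "$state" != "$expected_state" ]]; then
        echo "unexpected state: got $state, want $expected_state" >&2
        return 1
    fi
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "unexpected error message: $message" >&2
        return 1
    fi
}

# Example matching the trace above:
# check_changefeed_state http://127.0.0.1:2379 test-error-2 failed ErrSyncRenameTableFailed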
check diff successfully
+ set +x
+ tso='449545354376380418
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545354376380418 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
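The block above queries a start TSO through the coverage-instrumented cdc.test binary, so the timestamp is followed by Go test PASS/coverage lines; the script keeps only the first whitespace-separated token via awk. A minimal reproduction, using the same PD address as the trace:

# Query a TSO from PD and keep only the numeric timestamp; the trailing
# PASS/coverage lines come from the coverage-instrumented cdc.test binary.
start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
echo "start_ts=$start_ts"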
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 1-th time...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_stop_delay/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
***************** properties *****************
"mysql.user"="root"
"operationcount"="0"
"readproportion"="0"
"updateproportion"="0"
"workload"="core"
"scanproportion"="0"
"threadcount"="2"
"mysql.host"="127.0.0.1"
"dotransactions"="false"
"mysql.db"="sink_retry"
"recordcount"="10"
"insertproportion"="0"
"mysql.port"="4000"
"requestdistribution"="uniform"
"readallfields"="true"
**********************************************
Run finished, took 9.001979ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 1973.6, Avg(us): 1693, Min(us): 997, Max(us): 3824, 95th(us): 4000, 99th(us): 4000
[Sun May  5 11:23:56 CST 2024] <<<<<< START cdc server in sink_retry case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.28792881.out server --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
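The sink_retry trace above starts the CDC server with a Go failpoint that makes the MySQL sink fail 25% of transactions, which is what exercises the retry path. Reduced to its essentials (arguments taken from the trace; running the binary in the background is an assumption about how the harness invokes it):

# Start cdc.test with a failpoint that randomly fails MySQL sink transactions,
# as in the sink_retry trace above. The trailing '&' is assumed, since the
# harness keeps probing the server after launching it.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/sink/dmlsink/txn/mysql/MySQLSinkTxnRandomError=25%return(true)'
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.28792881.out server \
    --log-file /tmp/tidb_cdc_test/sink_retry/cdc.log \
    --log-level debug \
    --data-dir /tmp/tidb_cdc_test/sink_retry/cdc_data \
    --cluster-id default &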
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 3-th time...
table ddl_manager.finish_mark not exists for 19-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e45ec000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:1369, start at 2024-05-05 11:23:55.142624983 +0800 CST m=+6.319581913	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:55.148 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:55.131 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:55.131 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e45ec000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:1369, start at 2024-05-05 11:23:55.142624983 +0800 CST m=+6.319581913	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:55.148 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:55.131 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:55.131 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e35780003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:1458, start at 2024-05-05 11:23:54.080719981 +0800 CST m=+5.200310927	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:54.087 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:54.078 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:54.078 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/default_value/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/default_value/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/default_value/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/default_value/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Verifying downstream PD is started...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:23:57 CST 2024] <<<<<< run test case multi_tables_ddl_v2 success! >>>>>>
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e3f280013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:1408, start at 2024-05-05 11:23:54.726285529 +0800 CST m=+5.418669104	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:54.736 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:54.698 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:54.698 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e3f280013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:1408, start at 2024-05-05 11:23:54.726285529 +0800 CST m=+5.418669104	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:54.736 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:54.698 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:54.698 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e3fd80015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:1487, start at 2024-05-05 11:23:54.783718656 +0800 CST m=+5.420448550	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:54.791 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:54.792 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:54.792 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/gc_safepoint/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/gc_safepoint/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:58 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d5df1b55-235c-4156-bda1-d3e8df88ac67
	{"id":"d5df1b55-235c-4156-bda1-d3e8df88ac67","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879435}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c758d9c6
	d5df1b55-235c-4156-bda1-d3e8df88ac67

/tidb/cdc/default/default/upstream/7365351039573873706
	{"id":7365351039573873706,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d5df1b55-235c-4156-bda1-d3e8df88ac67
	{"id":"d5df1b55-235c-4156-bda1-d3e8df88ac67","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879435}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c758d9c6
	d5df1b55-235c-4156-bda1-d3e8df88ac67

/tidb/cdc/default/default/upstream/7365351039573873706
	{"id":7365351039573873706,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d5df1b55-235c-4156-bda1-d3e8df88ac67
	{"id":"d5df1b55-235c-4156-bda1-d3e8df88ac67","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879435}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c758d9c6
	d5df1b55-235c-4156-bda1-d3e8df88ac67

/tidb/cdc/default/default/upstream/7365351039573873706
	{"id":7365351039573873706,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
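The trace above is the harness's readiness loop: it polls the capture's /debug/info endpoint with HTTP basic auth (ticdc:ticdc_secret) until the dump contains "etcd info", which only appears once the server has registered itself. A standalone sketch of the same idea; the function name and comments are illustrative, not the harness's actual helper:

```bash
# Readiness loop sketch: poll /debug/info until the dump contains "etcd info".
# Endpoint, credentials and retry budget mirror the trace above.
wait_cdc_ready() {
  local url="http://127.0.0.1:8300/debug/info"
  local res
  for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>/dev/null)
    if echo "$res" | grep -q 'failed to get info:'; then
      : # server answered but is not healthy yet; fall through and retry
    elif echo "$res" | grep -q 'etcd info'; then
      return 0   # capture is up and registered in etcd
    fi
    sleep 3
  done
  return 1       # gave up after roughly 150 seconds
}
```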
[Sun May  5 11:23:58 CST 2024] <<<<<< START kafka consumer in kv_client_stream_reconnect case >>>>>>
table ddl_manager.finish_mark not exists for 20-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/processor_stop_delay
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.2919.out cli tso query --pd=http://127.0.0.1:2379
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e79d00009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:1362, start at 2024-05-05 11:23:58.462116213 +0800 CST m=+5.264602129	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:58.469 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:58.452 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:58.452 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Sun May  5 11:23:59 CST 2024] <<<<<< START cdc server in gc_safepoint case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.gc_safepoint.28232825.out server --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
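GO_FAILPOINTS above injects a failpoint into cdc.test via github.com/pingcap/failpoint. As a hedged reading of the term syntax, return(500) fires on every evaluation, while a count-prefixed term such as 1*return(true) (used for ProcessorAddTableError later in this log) fires only that many times. A minimal launch sketch using the values from the trace:

```bash
# Sketch: launch cdc.test with the gc_safepoint failpoint enabled. Flags and
# paths are taken from the trace above; "return(500)" fires on every hit,
# "1*return(true)" would fire exactly once.
GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectGcSafepointUpdateInterval=return(500)' \
  cdc.test server \
    --pd http://127.0.0.1:2379 --addr 127.0.0.1:8300 \
    --data-dir /tmp/tidb_cdc_test/gc_safepoint/cdc_data --cluster-id default \
    --log-file /tmp/tidb_cdc_test/gc_safepoint/cdc.log --log-level debug
```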
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:23:59 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6ed9ed0d-55c3-493e-a60f-70e8524552bf
	{"id":"6ed9ed0d-55c3-493e-a60f-70e8524552bf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879437}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c751ebf6
	6ed9ed0d-55c3-493e-a60f-70e8524552bf

/tidb/cdc/default/default/upstream/7365351027732875165
	{"id":7365351027732875165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6ed9ed0d-55c3-493e-a60f-70e8524552bf
	{"id":"6ed9ed0d-55c3-493e-a60f-70e8524552bf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879437}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c751ebf6
	6ed9ed0d-55c3-493e-a60f-70e8524552bf

/tidb/cdc/default/default/upstream/7365351027732875165
	{"id":7365351027732875165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6ed9ed0d-55c3-493e-a60f-70e8524552bf
	{"id":"6ed9ed0d-55c3-493e-a60f-70e8524552bf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879437}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c751ebf6
	6ed9ed0d-55c3-493e-a60f-70e8524552bf

/tidb/cdc/default/default/upstream/7365351027732875165
	{"id":7365351027732875165,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sink_retry.cli.2936.out cli changefeed create --start-ts=449545354376380418 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28396?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Create changefeed successfully!
ID: c0d807b0-ff7f-430a-933c-50c590df2cf9
Info: {"upstream_id":7365351027732875165,"namespace":"default","id":"c0d807b0-ff7f-430a-933c-50c590df2cf9","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28396?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:24:00.229565192+08:00","start_ts":449545354376380418,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545354376380418,"checkpoint_ts":449545354376380418,"checkpoint_time":"2024-05-05 11:23:54.114"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
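The changefeed just created uses an open-protocol Kafka sink; partition count, broker version and message-size cap are all carried in the sink URI. A sketch of the equivalent plain cdc invocation, with the test-specific topic and start-ts taken from the trace above:

```bash
# Sketch of the changefeed creation traced above: open-protocol Kafka sink with
# explicit partition-num, kafka-version and max-message-bytes. Topic name and
# start-ts are the values specific to this run.
cdc cli changefeed create \
  --pd=http://127.0.0.1:2379 \
  --start-ts=449545354376380418 \
  --sink-uri='kafka://127.0.0.1:9092/ticdc-sink-retry-test-28396?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
```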
table ddl_manager.finish_mark not exists for 21-th check, retry later
+ set +x
+ tso='449545355809783809
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545355809783809 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
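The block above strips the PASS/coverage noise that the instrumented cli prints alongside the TSO, keeping only the first whitespace-separated field. A condensed sketch of the same extraction; variable names are illustrative:

```bash
# Sketch: capture the start-ts for changefeed creation. The instrumented cli
# prints PASS/coverage lines after the TSO, so only the first field is kept.
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')  # unquoted on purpose: flattens newlines
echo "$start_ts"                                        # e.g. 449545355809783809
```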
[Sun May  5 11:24:01 CST 2024] <<<<<< START cdc server in default_value case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.29562958.out server --log-file /tmp/tidb_cdc_test/default_value/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/default_value/cdc_data --cluster-id default
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e79d00009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:1362, start at 2024-05-05 11:23:58.462116213 +0800 CST m=+5.264602129	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:58.469 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:58.452 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:58.452 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1e79d00009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:1445, start at 2024-05-05 11:23:58.463421003 +0800 CST m=+5.206852624	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:25:58.469 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:23:58.452 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:13:58.452 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/resolve_lock/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resolve_lock/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
[Sun May  5 11:24:01 CST 2024] <<<<<< START kafka consumer in sink_retry case >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Sun May  5 11:24:01 CST 2024] <<<<<< START cdc server in processor_err_chan case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/ProcessorAddTableError=1*return(true)'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_err_chan.29552957.out server --log-file /tmp/tidb_cdc_test/processor_err_chan/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_err_chan/cdc_data --cluster-id default --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_err_chan/conf/server.toml --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:02 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/38baeb22-e1c8-4f6d-90bf-06d5d555fa71
	{"id":"38baeb22-e1c8-4f6d-90bf-06d5d555fa71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879440}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c76aaace
	38baeb22-e1c8-4f6d-90bf-06d5d555fa71

/tidb/cdc/default/default/upstream/7365351051259690270
	{"id":7365351051259690270,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/38baeb22-e1c8-4f6d-90bf-06d5d555fa71
	{"id":"38baeb22-e1c8-4f6d-90bf-06d5d555fa71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879440}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c76aaace
	38baeb22-e1c8-4f6d-90bf-06d5d555fa71

/tidb/cdc/default/default/upstream/7365351051259690270
	{"id":7365351051259690270,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/38baeb22-e1c8-4f6d-90bf-06d5d555fa71
	{"id":"38baeb22-e1c8-4f6d-90bf-06d5d555fa71","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879440}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c76aaace
	38baeb22-e1c8-4f6d-90bf-06d5d555fa71

/tidb/cdc/default/default/upstream/7365351051259690270
	{"id":7365351051259690270,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:24:02 CST 2024] <<<<<< START kafka consumer in gc_safepoint case >>>>>>
0
table ddl_manager.finish_mark not exists for 22-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2896.out cli tso query --pd=http://127.0.0.1:2379
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 1-th time, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:04 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87db3762-0012-495c-a383-80a4219710f4
	{"id":"87db3762-0012-495c-a383-80a4219710f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c766e4d2
	87db3762-0012-495c-a383-80a4219710f4

/tidb/cdc/default/default/upstream/7365351048411344274
	{"id":7365351048411344274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87db3762-0012-495c-a383-80a4219710f4
	{"id":"87db3762-0012-495c-a383-80a4219710f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c766e4d2
	87db3762-0012-495c-a383-80a4219710f4

/tidb/cdc/default/default/upstream/7365351048411344274
	{"id":7365351048411344274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87db3762-0012-495c-a383-80a4219710f4
	{"id":"87db3762-0012-495c-a383-80a4219710f4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c766e4d2
	87db3762-0012-495c-a383-80a4219710f4

/tidb/cdc/default/default/upstream/7365351048411344274
	{"id":7365351048411344274,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.default_value.cli.3012.out cli changefeed create --start-ts=449545355809783809 '--sink-uri=kafka://127.0.0.1:9092/ticdc-default-value-test-18247?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:04 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7cadf263-6835-427b-bdb1-acb1d4b9cab4
	{"id":"7cadf263-6835-427b-bdb1-acb1d4b9cab4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7567480
	7cadf263-6835-427b-bdb1-acb1d4b9cab4

/tidb/cdc/default/default/upstream/7365351034633114428
	{"id":7365351034633114428,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7cadf263-6835-427b-bdb1-acb1d4b9cab4
	{"id":"7cadf263-6835-427b-bdb1-acb1d4b9cab4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7567480
	7cadf263-6835-427b-bdb1-acb1d4b9cab4

/tidb/cdc/default/default/upstream/7365351034633114428
	{"id":7365351034633114428,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7cadf263-6835-427b-bdb1-acb1d4b9cab4
	{"id":"7cadf263-6835-427b-bdb1-acb1d4b9cab4","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879441}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7567480
	7cadf263-6835-427b-bdb1-acb1d4b9cab4

/tidb/cdc/default/default/upstream/7365351034633114428
	{"id":7365351034633114428,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: 092d2ef3-f8eb-4335-91ff-4fd1d7254c43
Info: {"upstream_id":7365351048411344274,"namespace":"default","id":"092d2ef3-f8eb-4335-91ff-4fd1d7254c43","sink_uri":"kafka://127.0.0.1:9092/ticdc-default-value-test-18247?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:24:04.652498407+08:00","start_ts":449545355809783809,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545355809783809,"checkpoint_ts":449545355809783809,"checkpoint_time":"2024-05-05 11:23:59.582"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[Sun May  5 11:24:04 CST 2024] <<<<<< START kafka consumer in processor_err_chan case >>>>>>
check_changefeed_state http://127.0.0.1:2379 5b335a6b-84b0-45a3-9867-3135a17cef5f normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=5b335a6b-84b0-45a3-9867-3135a17cef5f
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 5b335a6b-84b0-45a3-9867-3135a17cef5f -s
+ info='{
  "upstream_id": 7365351034633114428,
  "namespace": "default",
  "id": "5b335a6b-84b0-45a3-9867-3135a17cef5f",
  "state": "normal",
  "checkpoint_tso": 449545357170835462,
  "checkpoint_time": "2024-05-05 11:24:04.774",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351034633114428,
  "namespace": "default",
  "id": "5b335a6b-84b0-45a3-9867-3135a17cef5f",
  "state": "normal",
  "checkpoint_tso": 449545357170835462,
  "checkpoint_time": "2024-05-05 11:24:04.774",
  "error": null
}'
{
  "upstream_id": 7365351034633114428,
  "namespace": "default",
  "id": "5b335a6b-84b0-45a3-9867-3135a17cef5f",
  "state": "normal",
  "checkpoint_tso": 449545357170835462,
  "checkpoint_time": "2024-05-05 11:24:04.774",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351034633114428, '"namespace":' '"default",' '"id":' '"5b335a6b-84b0-45a3-9867-3135a17cef5f",' '"state":' '"normal",' '"checkpoint_tso":' 449545357170835462, '"checkpoint_time":' '"2024-05-05' '11:24:04.774",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351034633114428, '"namespace":' '"default",' '"id":' '"5b335a6b-84b0-45a3-9867-3135a17cef5f",' '"state":' '"normal",' '"checkpoint_tso":' 449545357170835462, '"checkpoint_time":' '"2024-05-05' '11:24:04.774",' '"error":' null '}'
++ jq -r .error.message
table ddl_manager.finish_mark not exists for 23-th check, retry later
+ set +x
+ tso='449545356929400833
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545356929400833 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:24:05 CST 2024] <<<<<< START cdc server in resolve_lock case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.29302932.out server --log-file /tmp/tidb_cdc_test/resolve_lock/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resolve_lock/cdc_data --cluster-id default
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ message=null
+ [[ ! null =~ null ]]
run task successfully
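The check_changefeed_state trace above queries the changefeed with `cdc cli changefeed query -s` and asserts on jq-extracted fields. A simplified reconstruction of that check (not the original helper script):

```bash
# Sketch of the state check traced above: query one changefeed and assert that
# it is in the expected state. The helper name mirrors the harness's
# check_changefeed_state, but this body is a simplified reconstruction.
check_changefeed_state() {
  local pd=$1 changefeed_id=$2 expected_state=$3
  local info state error_msg
  info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
  state=$(echo "$info" | jq -r .state)
  error_msg=$(echo "$info" | jq -r .error.message)
  if [[ "$state" != "$expected_state" ]]; then
    echo "unexpected state: $state (want $expected_state), error: $error_msg" >&2
    return 1
  fi
  echo "changefeed $changefeed_id is $state"
}

# Usage matching the trace:
check_changefeed_state http://127.0.0.1:2379 5b335a6b-84b0-45a3-9867-3135a17cef5f normal
```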
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ed6c00015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:5756, start at 2024-05-05 11:24:04.424512246 +0800 CST m=+5.271033988	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:04.434 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:04.400 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:04.400 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 1-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1edf000021	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:12486, start at 2024-05-05 11:24:04.977656265 +0800 CST m=+5.167530432	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:04.985 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:04.978 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:04.978 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
[Sun May  5 11:24:06 CST 2024] <<<<<< START kafka consumer in default_value case >>>>>>
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading go.uber.org/zap v1.27.0
go: downloading github.com/google/uuid v1.6.0
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading golang.org/x/time v0.5.0
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading google.golang.org/grpc v1.62.1
go: downloading github.com/go-sql-driver/mysql v1.7.1
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading go.uber.org/atomic v1.11.0
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading google.golang.org/protobuf v1.33.0
go: downloading golang.org/x/sys v0.19.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading golang.org/x/net v0.24.0
go: downloading github.com/golang/protobuf v1.5.4
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
go: downloading golang.org/x/text v0.14.0
check diff failed 2-th time, retry later
table ddl_manager.finish_mark not exists for 24-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ed6c00015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:5756, start at 2024-05-05 11:24:04.424512246 +0800 CST m=+5.271033988	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:04.434 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:04.400 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:04.400 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ed9140008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:5838, start at 2024-05-05 11:24:04.557607748 +0800 CST m=+5.353519000	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:04.564 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:04.549 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:04.549 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
check diff failed 2-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1edf000021	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:12486, start at 2024-05-05 11:24:04.977656265 +0800 CST m=+5.167530432	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:04.985 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:04.978 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:04.978 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1ee1580009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:12572, start at 2024-05-05 11:24:05.090822905 +0800 CST m=+5.228000059	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:05.097 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:05.078 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:05.078 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/db/proxy"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/mq_sink_dispatcher/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:08 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f72ac339-ed3c-4a82-96ae-ab536fc11497
	{"id":"f72ac339-ed3c-4a82-96ae-ab536fc11497","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879445}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c77601d6
	f72ac339-ed3c-4a82-96ae-ab536fc11497

/tidb/cdc/default/default/upstream/7365351069860358412
	{"id":7365351069860358412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f72ac339-ed3c-4a82-96ae-ab536fc11497
	{"id":"f72ac339-ed3c-4a82-96ae-ab536fc11497","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879445}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c77601d6
	f72ac339-ed3c-4a82-96ae-ab536fc11497

/tidb/cdc/default/default/upstream/7365351069860358412
	{"id":7365351069860358412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f72ac339-ed3c-4a82-96ae-ab536fc11497
	{"id":"f72ac339-ed3c-4a82-96ae-ab536fc11497","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879445}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c77601d6
	f72ac339-ed3c-4a82-96ae-ab536fc11497

/tidb/cdc/default/default/upstream/7365351069860358412
	{"id":7365351069860358412,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
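For readers following the trace: the block above (and the repeats of the same pattern later in this log) is the harness's readiness probe for the cdc server, expanded by `set -x`. Reduced to a standalone form it is roughly the loop below; a minimal sketch in which the endpoint, credentials, grep markers, and the 50-attempt / 3-second cadence are taken from the trace, while the function name is illustrative rather than the repo's:

wait_cdc_ready() {
    # Poll the debug endpoint until the response contains the "etcd info" section.
    local url='http://127.0.0.1:8300/debug/info'
    local res i
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo 'cdc server reported an error' >&2
            return 1
        fi
        if echo "$res" | grep -q 'etcd info'; then
            return 0    # server is up and has registered its capture in etcd
        fi
        if ((i == 50)); then
            echo 'cdc server did not become ready in time' >&2
            return 1
        fi
        sleep 3
    done
}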
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resolve_lock.cli.2989.out cli changefeed create --start-ts=449545356929400833 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resolve-lock-test-28054?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
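The `--sink-uri` above carries the Kafka sink settings as URL query parameters. Spelled out with one variable per parameter it looks like the sketch below; values are copied from the command, and the comments describe the standard TiCDC Kafka sink options they map to:

KAFKA_ADDR='127.0.0.1:9092'
TOPIC='ticdc-resolve-lock-test-28054'   # per-case topic generated by the test
PROTOCOL='open-protocol'                # message encoding protocol
PARTITION_NUM=4                         # number of Kafka partitions to write to
KAFKA_VERSION='2.4.1'                   # broker version hint for the producer
MAX_MESSAGE_BYTES=10485760              # 10 MiB cap per Kafka message
SINK_URI="kafka://${KAFKA_ADDR}/${TOPIC}?protocol=${PROTOCOL}&partition-num=${PARTITION_NUM}&kafka-version=${KAFKA_VERSION}&max-message-bytes=${MAX_MESSAGE_BYTES}"
cdc cli changefeed create --start-ts=449545356929400833 --sink-uri="$SINK_URI"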
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading github.com/spf13/pflag v1.0.5
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/prometheus/client_golang v1.19.0
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading github.com/docker/go-units v0.5.0
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/stretchr/testify v1.9.0
go: downloading github.com/scalalang2/golang-fifo v0.1.5
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/tidwall/btree v1.7.0
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/gorilla/mux v1.8.0
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading cloud.google.com/go v0.112.2
go: downloading google.golang.org/api v0.170.0
go: downloading go.uber.org/mock v0.4.0
go: downloading github.com/google/btree v1.1.2
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/cockroachdb/pebble v1.1.0
go: downloading github.com/jfcg/sorty/v2 v2.1.0
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading github.com/carlmjohnson/flagext v0.21.0
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df
go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2
go: downloading github.com/dgraph-io/ristretto v0.1.1
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading gopkg.in/yaml.v3 v3.0.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading golang.org/x/crypto v0.22.0
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
check diff successfully
check_safepoint_forward http://127.0.0.1:2379 7365351051259690270 449545357845856260 449545356652838917
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/spkg/bom v1.0.0
go: downloading github.com/xitongsys/parquet-go v1.6.0
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/kr/pretty v0.3.1
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading github.com/robfig/cron v1.2.0
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2
go: downloading github.com/ncw/directio v1.0.5
go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64
go: downloading github.com/klauspost/cpuid v1.3.1
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 25-th check, retry later
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
go: downloading github.com/go-logr/logr v1.4.1
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/go-logr/stdr v1.2.2
go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06
go: downloading github.com/DataDog/zstd v1.5.5
[Sun May  5 11:24:09 CST 2024] <<<<<< START cdc server in kafka_simple_basic case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.71737175.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
Create changefeed successfully!
ID: 8a7648e2-95b6-4adb-85f7-9e0af73e5fdd
Info: {"upstream_id":7365351069860358412,"namespace":"default","id":"8a7648e2-95b6-4adb-85f7-9e0af73e5fdd","sink_uri":"kafka://127.0.0.1:9092/ticdc-resolve-lock-test-28054?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:24:09.55126203+08:00","start_ts":449545356929400833,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545356929400833,"checkpoint_ts":449545356929400833,"checkpoint_time":"2024-05-05 11:24:03.853"}
PASS
run task successfully
check_changefeed_state http://127.0.0.1:2379 59d1b199-1746-4775-a56f-27961861224f stopped null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=59d1b199-1746-4775-a56f-27961861224f
+ expected_state=stopped
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 59d1b199-1746-4775-a56f-27961861224f -s
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
+ info='{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"stopped",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .state
+ state=stopped
+ [[ ! stopped == \s\t\o\p\p\e\d ]]
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"stopped",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
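The `check_changefeed_state` expansion above follows a simple pattern: query the changefeed, pull `.state` and `.error.message` out with jq, and compare them against the expected values. A reconstruction from this trace follows; the helper in the repo also handles TLS directories and other details not shown here:

check_changefeed_state() {
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    echo "$info"
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        exit 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
        echo "error message $message does not match $error_msg"
        exit 1
    fi
}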
check_safepoint_equal http://127.0.0.1:2379 7365351051259690270
check diff failed 3-th time, retry later
[Sun May  5 11:24:10 CST 2024] <<<<<< START cdc server in mq_sink_dispatcher case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.1390413906.out server --log-file /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc.log --log-level info --data-dir /tmp/tidb_cdc_test/mq_sink_dispatcher/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 26-th check, retry later
+ set +x
[Sun May  5 11:24:11 CST 2024] <<<<<< START kafka consumer in resolve_lock case >>>>>>
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading go.uber.org/zap v1.27.0
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading go.uber.org/atomic v1.11.0
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading google.golang.org/grpc v1.62.1
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading github.com/go-sql-driver/mysql v1.7.1
go: downloading github.com/google/uuid v1.6.0
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/golang/protobuf v1.5.4
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading github.com/prometheus/client_golang v1.19.0
safepoint is unexpected forward: 449545358108000258 449545356652838917 -> 449545358108000259 449545356652838917
run task failed 1-th time, retry later
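`check_safepoint_forward` compares the service GC safepoints that TiCDC registers in PD before and after the operation; the message above means the observed pair had not yet moved as expected, so the check is retried. One way to inspect the raw values is PD's service GC safepoint listing, sketched below under the assumption that pd-ctl is on PATH and reports the usual JSON shape; the repo's helper may query PD differently:

pd_addr='http://127.0.0.1:2379'
# List every registered service GC safepoint (gc_worker, ticdc changefeeds, ...)
# as "service_id<TAB>safe_point" pairs.
pd-ctl -u "$pd_addr" service-gc-safepoint \
    | jq -r '.service_gc_safe_points[] | "\(.service_id)\t\(.safe_point)"'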
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/docker/go-units v0.5.0
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading github.com/google/btree v1.1.2
go: downloading google.golang.org/protobuf v1.33.0
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading golang.org/x/sys v0.19.0
go: downloading golang.org/x/net v0.24.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading golang.org/x/text v0.14.0
check diff successfully
go: downloading github.com/google/s2a-go v0.1.7
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
go: downloading github.com/jmespath/go-jmespath v0.4.0
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1f283c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:6894, start at 2024-05-05 11:24:09.644844888 +0800 CST m=+5.183794267	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:09.651 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:09.615 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:09.615 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1f283c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:6894, start at 2024-05-05 11:24:09.644844888 +0800 CST m=+5.183794267	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:09.651 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:09.615 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:09.615 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
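The two VARIABLE_NAME/VARIABLE_VALUE/COMMENT dumps above are the bootstrap and GC bookkeeping rows that TiDB keeps in its mysql.tidb table; the harness prints them while waiting for each TiDB instance to come up. Querying them by hand looks like the sketch below, assuming the upstream TiDB listens on the default 127.0.0.1:4000 used by these tests:

# Show bootstrap status and GC worker state for the upstream TiDB.
mysql -h 127.0.0.1 -P 4000 -u root \
    -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'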
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b1f2a840014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:6970, start at 2024-05-05 11:24:09.789610383 +0800 CST m=+5.275924198	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:09.798 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:09.761 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:09.761 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/processor_stop_delay/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:12 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ef3a5999-1c99-4623-ae26-76965b1deba3
	{"id":"ef3a5999-1c99-4623-ae26-76965b1deba3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879449}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78955d0
	ef3a5999-1c99-4623-ae26-76965b1deba3

/tidb/cdc/default/default/upstream/7365351094764639424
	{"id":7365351094764639424,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ef3a5999-1c99-4623-ae26-76965b1deba3
	{"id":"ef3a5999-1c99-4623-ae26-76965b1deba3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879449}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78955d0
	ef3a5999-1c99-4623-ae26-76965b1deba3

/tidb/cdc/default/default/upstream/7365351094764639424
	{"id":7365351094764639424,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ef3a5999-1c99-4623-ae26-76965b1deba3
	{"id":"ef3a5999-1c99-4623-ae26-76965b1deba3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879449}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78955d0
	ef3a5999-1c99-4623-ae26-76965b1deba3

/tidb/cdc/default/default/upstream/7365351094764639424
	{"id":7365351094764639424,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7234.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-2953?protocol=simple' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic/conf/changefeed.toml -c simple-basic
wait process cdc.test exit for 1-th time...
check_safepoint_equal http://127.0.0.1:2379 7365351051259690270
Create changefeed successfully!
ID: simple-basic
Info: {"upstream_id":7365351094764639424,"namespace":"default","id":"simple-basic","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-2953?protocol=simple","create_time":"2024-05-05T11:24:13.094760588+08:00","start_ts":449545359314124804,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":5,"send_bootstrap_in_msg_count":100,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545359314124804,"checkpoint_ts":449545359314124804,"checkpoint_time":"2024-05-05 11:24:12.950"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
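For the kafka_simple_basic changefeed created above, the `--config` file tightens the bootstrap and integrity settings relative to the defaults, which is visible in the Info JSON (send_bootstrap_interval_in_sec 5, send_bootstrap_in_msg_count 100, integrity_check_level "correctness"). Written out as a changefeed.toml fragment it is roughly the sketch below; the key names are inferred from the JSON fields, and the file in the repo is authoritative:

cat > changefeed.toml <<'EOF'
[sink]
send-bootstrap-interval-in-sec = 5
send-bootstrap-in-msg-count = 100

[integrity]
integrity-check-level = "correctness"
corruption-handle-level = "error"
EOF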
table ddl_manager.finish_mark not exists for 27-th check, retry later
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:13 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4cd20f40-9af9-459c-93bf-be67627a4d03
	{"id":"4cd20f40-9af9-459c-93bf-be67627a4d03","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879450}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78dbecd
	4cd20f40-9af9-459c-93bf-be67627a4d03

/tidb/cdc/default/default/upstream/7365351090586786267
	{"id":7365351090586786267,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4cd20f40-9af9-459c-93bf-be67627a4d03
	{"id":"4cd20f40-9af9-459c-93bf-be67627a4d03","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879450}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78dbecd
	4cd20f40-9af9-459c-93bf-be67627a4d03

/tidb/cdc/default/default/upstream/7365351090586786267
	{"id":7365351090586786267,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4cd20f40-9af9-459c-93bf-be67627a4d03
	{"id":"4cd20f40-9af9-459c-93bf-be67627a4d03","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879450}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c78dbecd
	4cd20f40-9af9-459c-93bf-be67627a4d03

/tidb/cdc/default/default/upstream/7365351090586786267
	{"id":7365351090586786267,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.13972.out cli tso query --pd=http://127.0.0.1:2379
wait process cdc.test exit for 2-th time...
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/spf13/pflag v1.0.5
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading github.com/dgraph-io/ristretto v0.1.1
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading github.com/gorilla/mux v1.8.0
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading github.com/stretchr/testify v1.9.0
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/scalalang2/golang-fifo v0.1.5
go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/tidwall/btree v1.7.0
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading go.uber.org/mock v0.4.0
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/cockroachdb/pebble v1.1.0
go: downloading github.com/carlmjohnson/flagext v0.21.0
go: downloading github.com/spkg/bom v1.0.0
go: downloading github.com/jfcg/sorty/v2 v2.1.0
go: downloading github.com/xitongsys/parquet-go v1.6.0
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading google.golang.org/api v0.170.0
go: downloading golang.org/x/time v0.5.0
go: downloading github.com/sourcegraph/appdash v0.0.0-20190731080439-ebfcffb1b5c0
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading gopkg.in/yaml.v3 v3.0.1
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading golang.org/x/crypto v0.22.0
go: downloading cloud.google.com/go v0.112.2
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/vbauerster/mpb/v7 v7.5.3
go: downloading golang.org/x/term v0.19.0
go: downloading github.com/spf13/cobra v1.8.0
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/robfig/cron v1.2.0
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading github.com/Masterminds/semver v1.5.0
go: downloading k8s.io/api v0.28.6
go: downloading github.com/emirpasic/gods v1.18.1
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/kr/pretty v0.3.1
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/ncw/directio v1.0.5
go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2
go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64
go: downloading github.com/klauspost/cpuid v1.3.1
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:24:14 CST 2024] <<<<<< run test case processor_err_chan success! >>>>>>
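The "wait process cdc.test exit" lines above come from a poll loop that checks whether the binary is still running before declaring the case finished. Reduced to a sketch; the helper name and the retry bound are illustrative, not the repo's:

wait_process_exit() {
    local name=$1 i
    for ((i = 1; i <= 60; i++)); do
        if ! pgrep -x "$name" > /dev/null; then
            echo "process $name already exit"
            return 0
        fi
        echo "wait process $name exit for $i-th time..."
        sleep 1
    done
    echo "process $name still running, giving up" >&2
    return 1
}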
go: downloading github.com/go-logr/logr v1.4.1
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/go-logr/stdr v1.2.2
go: downloading github.com/DataDog/zstd v1.5.5
go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06
go: downloading k8s.io/apimachinery v0.28.6
go: downloading k8s.io/klog/v2 v2.120.1
go: downloading github.com/google/gofuzz v1.2.0
go: downloading gopkg.in/inf.v0 v0.9.1
go: downloading sigs.k8s.io/structured-merge-diff/v4 v4.4.1
go: downloading sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd
go: downloading k8s.io/utils v0.0.0-20230726121419-3b25d923346b
+ set +x
[Sun May  5 11:24:14 CST 2024] <<<<<< START cdc server in processor_stop_delay case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)'
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.processor_stop_delay.84188420.out server --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
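The processor_stop_delay case above starts the server with a failpoint injected through the GO_FAILPOINTS environment variable. The term `1*sleep(10000)` is a pingcap/failpoint expression: fire the failpoint once and sleep 10000 ms when it fires, which is what delays the processor stop in this test. Stripped of the coverage wrapper, the startup amounts to the sketch below, with flag values copied from the trace:

export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorStopDelay=1*sleep(10000)'
cdc server \
    --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379 \
    --log-file /tmp/tidb_cdc_test/processor_stop_delay/cdc.log --log-level debug \
    --data-dir /tmp/tidb_cdc_test/processor_stop_delay/cdc_data --cluster-id default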
table ddl_manager.finish_mark not exists for 28-th check, retry later
+ set +x
+ tso='449545359531180036
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545359531180036 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
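The `tso query` above returns a TiDB timestamp oracle value whose upper bits are the physical time in milliseconds and whose low 18 bits are a logical counter, which is why the changefeed created from it below reports checkpoint_time 2024-05-05 11:24:13.778. A quick conversion of the value from this trace (GNU date assumed):

tso=449545359531180036
physical_ms=$((tso >> 18))               # strip the 18-bit logical counter
echo "$physical_ms"                      # 1714879453778 ms since the Unix epoch
date -u -d "@$((physical_ms / 1000))" '+%Y-%m-%d %H:%M:%S UTC'   # 03:24:13 UTC, i.e. 11:24:13 CST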
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14015.out cli changefeed create --start-ts=449545359531180036 '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/changefeed.toml
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
Create changefeed successfully!
ID: test
Info: {"upstream_id":7365351090586786267,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-05T11:24:15.712860467+08:00","start_ts":449545359531180036,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["verify.t"],"partition":"index-value"},{"matcher":["dispatcher.index"],"partition":"index-value","index":"idx_a"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545359531180036,"checkpoint_ts":449545359531180036,"checkpoint_time":"2024-05-05 11:24:13.778"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
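The dispatcher rules visible in the Info JSON above come from the `--config` file passed to `cli changefeed create`. Reconstructed as a changefeed.toml fragment they look like the sketch below; the field names are taken from the JSON, and the actual file in the repo may carry more settings:

cat > changefeed.toml <<'EOF'
[sink]
dispatchers = [
    { matcher = ["verify.t"], partition = "index-value" },
    { matcher = ["dispatcher.index"], partition = "index-value", index = "idx_a" },
]
EOF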
run task successfully
check_changefeed_state http://127.0.0.1:2379 59d1b199-1746-4775-a56f-27961861224f normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=59d1b199-1746-4775-a56f-27961861224f
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 59d1b199-1746-4775-a56f-27961861224f -s
start tidb cluster in /tmp/tidb_cdc_test/multi_topics_v2
Starting Upstream PD...
+ info='{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "normal",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "normal",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "normal",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"normal",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"normal",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
check_safepoint_forward http://127.0.0.1:2379 7365351051259690270 449545358108000259 449545358108000260
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
check_changefeed_state http://127.0.0.1:2379 test normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
table ddl_manager.finish_mark not exists for 29-th check, retry later
+ info='{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}'
{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545359531180036, '"checkpoint_time":' '"2024-05-05' '11:24:13.778",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545359531180036, '"checkpoint_time":' '"2024-05-05' '11:24:13.778",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
go: downloading github.com/google/s2a-go v0.1.7
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
go: downloading github.com/jmespath/go-jmespath v0.4.0
+ info='{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}'
{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545359531180036,
  "checkpoint_time": "2024-05-05 11:24:13.778",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545359531180036, '"checkpoint_time":' '"2024-05-05' '11:24:13.778",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \f\a\i\l\e\d ]]
+ echo 'changefeed state normal does not equal to failed'
changefeed state normal does not equal to failed
+ exit 1
run task failed 1-th time, retry later
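The "run task failed N-th time, retry later" lines come from a retry wrapper around these checks: the changefeed is expected to flip from normal to failed once the dispatcher hits the bad index, so the check is simply re-run until it passes or a retry budget is exhausted. A minimal sketch of that pattern, with names and the retry budget illustrative rather than the repo's:

run_with_retry() {
    local max_retries=$1; shift
    local i
    for ((i = 1; i <= max_retries; i++)); do
        # Run the check in a subshell so an exit inside it does not kill the harness.
        if ( "$@" ); then
            echo 'run task successfully'
            return 0
        fi
        echo "run task failed $i-th time, retry later"
        sleep 2
    done
    echo "task failed after $max_retries retries" >&2
    return 1
}

# e.g. run_with_retry 10 check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed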
run task successfully
check_changefeed_state http://127.0.0.1:2379 59d1b199-1746-4775-a56f-27961861224f stopped null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=59d1b199-1746-4775-a56f-27961861224f
+ expected_state=stopped
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 59d1b199-1746-4775-a56f-27961861224f -s
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:17 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ca9e87ce-bf36-4f0c-bff1-1b6cf377435b
	{"id":"ca9e87ce-bf36-4f0c-bff1-1b6cf377435b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879455}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7a00dcb
	ca9e87ce-bf36-4f0c-bff1-1b6cf377435b

/tidb/cdc/default/default/upstream/7365351117028550251
	{"id":7365351117028550251,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ca9e87ce-bf36-4f0c-bff1-1b6cf377435b
	{"id":"ca9e87ce-bf36-4f0c-bff1-1b6cf377435b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879455}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7a00dcb
	ca9e87ce-bf36-4f0c-bff1-1b6cf377435b

/tidb/cdc/default/default/upstream/7365351117028550251
	{"id":7365351117028550251,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ca9e87ce-bf36-4f0c-bff1-1b6cf377435b
	{"id":"ca9e87ce-bf36-4f0c-bff1-1b6cf377435b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879455}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7a00dcb
	ca9e87ce-bf36-4f0c-bff1-1b6cf377435b

/tidb/cdc/default/default/upstream/7365351117028550251
	{"id":7365351117028550251,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ info='{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}'
{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "59d1b199-1746-4775-a56f-27961861224f",
  "state": "stopped",
  "checkpoint_tso": 449545358108000260,
  "checkpoint_time": "2024-05-05 11:24:08.349",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"stopped",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .state
+ state=stopped
+ [[ ! stopped == \s\t\o\p\p\e\d ]]
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"59d1b199-1746-4775-a56f-27961861224f",' '"state":' '"stopped",' '"checkpoint_tso":' 449545358108000260, '"checkpoint_time":' '"2024-05-05' '11:24:08.349",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
[Sun May  5 11:24:18 CST 2024] <<<<<< START kafka consumer in processor_stop_delay case >>>>>>
check_changefeed_state http://127.0.0.1:2379 7115e660-16a8-45a3-9d30-6d5b6ec3d034 normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=7115e660-16a8-45a3-9d30-6d5b6ec3d034
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c 7115e660-16a8-45a3-9d30-6d5b6ec3d034 -s
+ info='{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "7115e660-16a8-45a3-9d30-6d5b6ec3d034",
  "state": "normal",
  "checkpoint_tso": 449545360690118659,
  "checkpoint_time": "2024-05-05 11:24:18.199",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "7115e660-16a8-45a3-9d30-6d5b6ec3d034",
  "state": "normal",
  "checkpoint_tso": 449545360690118659,
  "checkpoint_time": "2024-05-05 11:24:18.199",
  "error": null
}'
{
  "upstream_id": 7365351051259690270,
  "namespace": "default",
  "id": "7115e660-16a8-45a3-9d30-6d5b6ec3d034",
  "state": "normal",
  "checkpoint_tso": 449545360690118659,
  "checkpoint_time": "2024-05-05 11:24:18.199",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"7115e660-16a8-45a3-9d30-6d5b6ec3d034",' '"state":' '"normal",' '"checkpoint_tso":' 449545360690118659, '"checkpoint_time":' '"2024-05-05' '11:24:18.199",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351051259690270, '"namespace":' '"default",' '"id":' '"7115e660-16a8-45a3-9d30-6d5b6ec3d034",' '"state":' '"normal",' '"checkpoint_tso":' 449545360690118659, '"checkpoint_time":' '"2024-05-05' '11:24:18.199",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
check_safepoint_equal http://127.0.0.1:2379 7365351051259690270
table ddl_manager.finish_mark not exists for 30-th check, retry later
table processor_stop_delay.t not exists for 1-th check, retry later
[Sun May  5 11:24:19 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic case >>>>>>
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 27.19 secs (136961293 bytes/sec)
[Pipeline] {
[Pipeline] cache
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}'
{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545360514220053, '"checkpoint_time":' '"2024-05-05' '11:24:17.528",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \f\a\i\l\e\d ]]
+ echo 'changefeed state normal does not equal to failed'
changefeed state normal does not equal to failed
+ exit 1
run task failed 2-th time, retry later
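The alternating "run task failed N-th time, retry later" / "run task successfully" lines come from a retry wrapper around these checks; the wrapper itself never appears in the log, only its output. A sketch of that pattern under assumed names:

    retry_task() {
        # hypothetical helper; only the two echoed messages appear in the trace
        local max_retries=$1; shift
        for ((i = 1; i <= max_retries; i++)); do
            if "$@"; then
                echo "run task successfully"
                return 0
            fi
            echo "run task failed $i-th time, retry later"
            sleep 2
        done
        return 1
    }

Here the check expects state "failed", the changefeed is still "normal" on this attempt, and a later retry succeeds once the dispatcher error surfaces (see the "run task successfully" further down).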
table test.finish_mark_for_ddl not exists for 1-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table ddl_manager.finish_mark not exists for 31-th check, retry later
table processor_stop_delay.t exists
check diff failed 1-th time, retry later
run task successfully
Changefeed remove successfully.
ID: 59d1b199-1746-4775-a56f-27961861224f
CheckpointTs: 449545358108000260
SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-2848?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
check_safepoint_forward http://127.0.0.1:2379 7365351051259690270 449545361515610117 449545358108000260 449545360690118659
table test.finish_mark_for_ddl not exists for 2-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
run task successfully
table ddl_manager.finish_mark not exists for 32-th check, retry later
check diff successfully
Changefeed remove successfully.
ID: 7115e660-16a8-45a3-9d30-6d5b6ec3d034
CheckpointTs: 449545361777754117
SinkURI: kafka://127.0.0.1:9092/ticdc-gc-safepoint-2848?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
check_safepoint_cleared http://127.0.0.1:2379 7365351051259690270
run task successfully
wait process cdc.test exit for 1-th time...
check_changefeed_state http://127.0.0.1:2379 test failed ErrDispatcherFailed
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=failed
+ error_msg=ErrDispatcherFailed
+ tls_dir=ErrDispatcherFailed
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "failed",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": {
    "time": "2024-05-05T11:24:21.160875114+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrDispatcherFailed",
    "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"
  }
}'
+ echo '{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "failed",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": {
    "time": "2024-05-05T11:24:21.160875114+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrDispatcherFailed",
    "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"
  }
}'
{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "failed",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": {
    "time": "2024-05-05T11:24:21.160875114+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrDispatcherFailed",
    "message": "[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"
  }
}
++ jq -r .state
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449545360514220053, '"checkpoint_time":' '"2024-05-05' '11:24:17.528",' '"error":' '{' '"time":' '"2024-05-05T11:24:21.160875114+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}'
+ state=failed
+ [[ ! failed == \f\a\i\l\e\d ]]
++ jq -r .error.message
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"failed",' '"checkpoint_tso":' 449545360514220053, '"checkpoint_time":' '"2024-05-05' '11:24:17.528",' '"error":' '{' '"time":' '"2024-05-05T11:24:21.160875114+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrDispatcherFailed",' '"message":' '"[CDC:ErrDispatcherFailed]index' not found when dispatch event, table: index, index: 'idx_a"' '}' '}'
+ message='[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a'
+ [[ ! [CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a =~ ErrDispatcherFailed ]]
run task successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14288.out cli changefeed update -c test '--sink-uri=kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json&enable-tidb-extension=true' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/conf/new_changefeed.toml --no-confirm
wait process cdc.test exit for 2-th time...
table test.finish_mark_for_ddl not exists for 3-th check, retry later
Diff of changefeed config:
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc0015177f8}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc001517818}
{Type:update Path:[Config Sink DispatchRules 0 Matcher 0] From:verify.t To:dispatcher.index}
{Type:delete Path:[Config Sink DispatchRules 1 Matcher 0] From:dispatcher.index To:<nil>}
{Type:delete Path:[Config Sink DispatchRules 1 PartitionRule] From:index-value To:<nil>}
{Type:delete Path:[Config Sink DispatchRules 1 IndexName] From:idx_a To:<nil>}
{Type:update Path:[Config Consistent] From:<nil> To:0xc0013d0690}
Update changefeed config successfully! 
ID: test
Info: {"upstream_id":7365351090586786267,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/dispatcher-test?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-05T11:24:15.712860467+08:00","start_ts":449545359531180036,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","dispatchers":[{"matcher":["dispatcher.index"],"partition":"index-value"}],"encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"failed","error":{"addr":"127.0.0.1:8300","code":"CDC:ErrDispatcherFailed","message":"[CDC:ErrDispatcherFailed]index not found when dispatch event, table: index, index: idx_a"},"creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545360514220053,"checkpoint_time":"2024-05-05 11:24:17.528"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:24:25 CST 2024] <<<<<< run test case gc_safepoint success! >>>>>>
table ddl_manager.finish_mark not exists for 33-th check, retry later
check diff failed 1-th time, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.mq_sink_dispatcher.cli.14320.out cli changefeed resume -c test
table test.finish_mark_for_ddl not exists for 4-th check, retry later
PASS
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 34-th check, retry later
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
check diff failed 2-th time, retry later
table test.finish_mark_for_ddl not exists for 5-th check, retry later
+ set +x
check_changefeed_state http://127.0.0.1:2379 test normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=test
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c test -s
+ info='{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}'
{
  "upstream_id": 7365351090586786267,
  "namespace": "default",
  "id": "test",
  "state": "normal",
  "checkpoint_tso": 449545360514220053,
  "checkpoint_time": "2024-05-05 11:24:17.528",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545360514220053, '"checkpoint_time":' '"2024-05-05' '11:24:17.528",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351090586786267, '"namespace":' '"default",' '"id":' '"test",' '"state":' '"normal",' '"checkpoint_tso":' 449545360514220053, '"checkpoint_time":' '"2024-05-05' '11:24:17.528",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2046240008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:9032, start at 2024-05-05 11:24:27.922750409 +0800 CST m=+5.268102448	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:27.929 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:27.913 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:27.913 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_reconstruct/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table ddl_manager.finish_mark not exists for 35-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
check diff failed 3-th time, retry later
start tidb cluster in /tmp/tidb_cdc_test/changefeed_reconstruct
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish_mark_for_ddl exists
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7361.out cli changefeed pause -c simple-basic
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2046240008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:9032, start at 2024-05-05 11:24:27.922750409 +0800 CST m=+5.268102448	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:27.929 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:27.913 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:27.913 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2046340003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-2hg1d-32fk1, pid:9116, start at 2024-05-05 11:24:27.921605822 +0800 CST m=+5.214920670	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:27.928 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:27.917 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:27.917 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
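The two VARIABLE_NAME/VARIABLE_VALUE dumps above are reads of TiDB's mysql.tidb bookkeeping table, used to confirm that the upstream and downstream servers have finished bootstrapping and that GC is configured. A query of roughly this shape produces such a dump (the upstream port 4000 appears elsewhere in the log; the exact invocation is an assumption):

    mysql -h 127.0.0.1 -P 4000 -u root \
        -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'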
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_manager.finish_mark not exists for 36-th check, retry later
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table test.finish_mark not exists for 2-th check, retry later
check diff failed 4-th time, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic.cli.7397.out cli changefeed resume -c simple-basic
table ddl_manager.finish_mark not exists for 37-th check, retry later
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10519.out cli tso query --pd=http://127.0.0.1:2379
table test.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
check diff failed 5-th time, retry later
table sink_retry.finish_mark_1 exists
check diff successfully
***************** properties *****************
"mysql.port"="4000"
"mysql.user"="root"
"updateproportion"="0"
"readproportion"="0"
"threadcount"="2"
"insertproportion"="0"
"requestdistribution"="uniform"
"recordcount"="10"
"dotransactions"="false"
"readallfields"="true"
"operationcount"="0"
"mysql.host"="127.0.0.1"
"scanproportion"="0"
"workload"="core"
"mysql.db"="sink_retry"
**********************************************
Run finished, takes 4.147967ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 3211.9, Avg(us): 694, Min(us): 453, Max(us): 1570, 95th(us): 2000, 99th(us): 2000
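The property list and the Run finished/INSERT summary above are go-ycsb output: a tiny core workload (recordcount=10, two threads, inserts only) loaded into the upstream sink_retry database. An invocation of roughly this shape would produce it (the flag spelling is an assumption; only the property names and values come from the log):

    go-ycsb load mysql \
        -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
        -p mysql.db=sink_retry -p workload=core \
        -p recordcount=10 -p threadcount=2 -p requestdistribution=uniform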
wait process cdc.test exit for 2-th time...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:24:34 CST 2024] <<<<<< run test case mq_sink_dispatcher success! >>>>>>
+ set +x
+ tso='449545364639318017
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545364639318017 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:24:34 CST 2024] <<<<<< START cdc server in multi_topics_v2 case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.1055810560.out server --log-file /tmp/tidb_cdc_test/multi_topics_v2/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics_v2/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ set +x
table ddl_manager.finish_mark not exists for 38-th check, retry later
check diff failed 6-th time, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
wait process cdc.test exit for 1-th time...
table ddl_manager.finish_mark not exists for 39-th check, retry later
wait process cdc.test exit for 2-th time...
check diff successfully
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:37 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/45c62b40-86cb-4a7f-a23f-a19beb31a8b2
	{"id":"45c62b40-86cb-4a7f-a23f-a19beb31a8b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879475}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7e52ed4
	45c62b40-86cb-4a7f-a23f-a19beb31a8b2

/tidb/cdc/default/default/upstream/7365351195940063002
	{"id":7365351195940063002,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/45c62b40-86cb-4a7f-a23f-a19beb31a8b2
	{"id":"45c62b40-86cb-4a7f-a23f-a19beb31a8b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879475}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7e52ed4
	45c62b40-86cb-4a7f-a23f-a19beb31a8b2

/tidb/cdc/default/default/upstream/7365351195940063002
	{"id":7365351195940063002,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/45c62b40-86cb-4a7f-a23f-a19beb31a8b2
	{"id":"45c62b40-86cb-4a7f-a23f-a19beb31a8b2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879475}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c7e52ed4
	45c62b40-86cb-4a7f-a23f-a19beb31a8b2

/tidb/cdc/default/default/upstream/7365351195940063002
	{"id":7365351195940063002,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
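The curl loop traced above is the standard wait-for-cdc-server probe: hit /debug/info on 127.0.0.1:8300 with basic auth until the response contains 'etcd info' and does not contain 'failed to get info:', retrying up to 50 times at three-second intervals. As a sketch (the curl command and both marker strings are verbatim from the trace; the give-up branch wording is an assumption):

    get_info_fail_msg='failed to get info:'
    etcd_info_msg='etcd info'
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        if ! echo "$res" | grep -q "$get_info_fail_msg" && echo "$res" | grep -q "$etcd_info_msg"; then
            break
        fi
        if [[ $i -eq 50 ]]; then
            echo 'cdc server failed to start in time'
            exit 1
        fi
        sleep 3
    done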
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics_v2.cli.10614.out cli changefeed create --start-ts=449545364639318017 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics_v2/conf/changefeed.toml
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:24:38 CST 2024] <<<<<< run test case kv_client_stream_reconnect success! >>>>>>
wait process cdc.test exit for 1-th time...
Create changefeed successfully!
ID: 0d0de0b8-47a7-474e-a01f-54c30a032485
Info: {"upstream_id":7365351195940063002,"namespace":"default","id":"0d0de0b8-47a7-474e-a01f-54c30a032485","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-05T11:24:38.297271917+08:00","start_ts":449545364639318017,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":true,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545364639318017,"checkpoint_ts":449545364639318017,"checkpoint_time":"2024-05-05 11:24:33.264"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
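The Info printed for the new changefeed shows the routing rule carried by the referenced multi_topics_v2 changefeed.toml: every test.* table goes to its own {schema}_{table} topic. Written out as TOML (via a heredoc to stay in shell; the file layout is an assumption, the matcher and topic values come from the Info above):

    # note: the heredoc terminator must stay at column 0
    cat > changefeed.toml <<'EOF'
    [sink]
    dispatchers = [
        { matcher = ["test.*"], topic = "{schema}_{table}" },
    ]
EOF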
wait process cdc.test exit for 2-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 40-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:24:39 CST 2024] <<<<<< run test case processor_stop_delay success! >>>>>>
+ set +x
table test.finish_mark not exists for 1-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 41-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b211b840014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:4322, start at 2024-05-05 11:24:41.613242265 +0800 CST m=+5.146890833	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:41.619 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:41.620 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:41.620 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b211b840014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:4322, start at 2024-05-05 11:24:41.613242265 +0800 CST m=+5.146890833	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:41.619 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:41.620 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:41.620 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b211d340013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:4406, start at 2024-05-05 11:24:41.7056246 +0800 CST m=+5.179013517	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:41.711 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:41.677 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:41.677 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_reconstruct/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 20.29 secs (183521173 bytes/sec)
[Pipeline] {
[Pipeline] cache
table ddl_manager.finish_mark not exists for 42-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_pause_resume/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table test.finish_mark not exists for 3-th check, retry later
table ddl_manager.finish_mark not exists for 43-th check, retry later
[Sun May  5 11:24:44 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.58455847.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver1 --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish_mark not exists for 4-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/changefeed_pause_resume
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/mq_sink_dispatcher/run.sh: line 1: 14395 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/new_changefeed.toml" 2>&1
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table ddl_manager.finish_mark not exists for 44-th check, retry later
table test.finish_mark not exists for 5-th check, retry later
Verifying downstream PD is started...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:47 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/21bfc749-c389-4456-a89f-b1e854e89329
	{"id":"21bfc749-c389-4456-a89f-b1e854e89329","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879485}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1ec3
	21bfc749-c389-4456-a89f-b1e854e89329

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/21bfc749-c389-4456-a89f-b1e854e89329
	{"id":"21bfc749-c389-4456-a89f-b1e854e89329","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879485}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1ec3
	21bfc749-c389-4456-a89f-b1e854e89329

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ grep -q 'etcd info'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/21bfc749-c389-4456-a89f-b1e854e89329
	{"id":"21bfc749-c389-4456-a89f-b1e854e89329","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879485}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1ec3
	21bfc749-c389-4456-a89f-b1e854e89329

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
[Sun May  5 11:24:48 CST 2024] <<<<<< START kafka consumer in changefeed_reconstruct case >>>>>>
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table ddl_manager.finish_mark not exists for 45-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
***************** properties *****************
"workload"="core"
"insertproportion"="0"
"mysql.host"="127.0.0.1"
"mysql.db"="changefeed_reconstruct"
"threadcount"="4"
"updateproportion"="0"
"scanproportion"="0"
"operationcount"="0"
"readproportion"="0"
"recordcount"="50"
"mysql.port"="4000"
"dotransactions"="false"
"requestdistribution"="uniform"
"readallfields"="true"
"mysql.user"="root"
**********************************************
Run finished, takes 24.841783ms
INSERT - Takes(s): 0.0, Count: 48, OPS: 2441.0, Avg(us): 1915, Min(us): 1133, Max(us): 5070, 95th(us): 6000, 99th(us): 6000
table changefeed_reconstruct.usertable not exists for 1-th check, retry later
table test.finish_mark not exists for 6-th check, retry later
table ddl_manager.finish_mark not exists for 46-th check, retry later
table changefeed_reconstruct.usertable exists
table test.finish_mark not exists for 7-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/partition_table/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
Verifying downstream PD is started...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 47-th check, retry later
check diff successfully
table test.finish_mark not exists for 8-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc/run.sh using Sink-Type: kafka... <<=================
wait process 5850 exit for 1-th time...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
wait process 5850 exit for 2-th time...
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (5850) - No such process
wait process 5850 exit for 3-th time...
process 5850 already exit
check_no_capture http://127.0.0.1:2379
parse error: Invalid numeric literal at line 1, column 6
run task successfully
[Sun May  5 11:24:54 CST 2024] <<<<<< START cdc server in changefeed_reconstruct case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_reconstruct.61236125.out server --log-file /tmp/tidb_cdc_test/changefeed_reconstruct/cdcserver2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_reconstruct/cdc_dataserver2 --cluster-id default --addr 127.0.0.1:8300
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/partition_table
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table ddl_manager.finish_mark not exists for 48-th check, retry later
The 1 times to try to start tidb cluster...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 9-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 49-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 10-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:24:57 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:

changefeedID: default/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
{UpstreamID:7365351255778119562 Namespace:default ID:2868b7f0-dbef-4e35-906e-8959d8eb4fcc SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:24:48.154234948 +0800 CST StartTs:449545368507514884 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0017b5950 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545368533729285}
{CheckpointTs:449545370735476741 MinTableBarrierTs:449545370735476741 AdminJobType:noop}
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545370997882884, checkpointTs: 449545370735476741, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/363ed1ef-638f-4c9b-8989-f2a1722a50cc
	{"id":"363ed1ef-638f-4c9b-8989-f2a1722a50cc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879494}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1f62
	363ed1ef-638f-4c9b-8989-f2a1722a50cc

/tidb/cdc/default/default/changefeed/info/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"upstream-id":7365351255778119562,"namespace":"default","changefeed-id":"2868b7f0-dbef-4e35-906e-8959d8eb4fcc","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:24:48.154234948+08:00","start-ts":449545368507514884,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545368533729285}

/tidb/cdc/default/default/changefeed/status/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":449545370735476741,"min-table-barrier-ts":449545370735476741,"admin-job-type":0}

/tidb/cdc/default/default/task/position/363ed1ef-638f-4c9b-8989-f2a1722a50cc/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
{UpstreamID:7365351255778119562 Namespace:default ID:2868b7f0-dbef-4e35-906e-8959d8eb4fcc SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:24:48.154234948 +0800 CST StartTs:449545368507514884 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0017b5950 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545368533729285}
{CheckpointTs:449545370735476741 MinTableBarrierTs:449545370735476741 AdminJobType:noop}
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545370997882884, checkpointTs: 449545370735476741, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/363ed1ef-638f-4c9b-8989-f2a1722a50cc
	{"id":"363ed1ef-638f-4c9b-8989-f2a1722a50cc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879494}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1f62
	363ed1ef-638f-4c9b-8989-f2a1722a50cc

/tidb/cdc/default/default/changefeed/info/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"upstream-id":7365351255778119562,"namespace":"default","changefeed-id":"2868b7f0-dbef-4e35-906e-8959d8eb4fcc","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:24:48.154234948+08:00","start-ts":449545368507514884,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545368533729285}

/tidb/cdc/default/default/changefeed/status/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":449545370735476741,"min-table-barrier-ts":449545370735476741,"admin-job-type":0}

/tidb/cdc/default/default/task/position/363ed1ef-638f-4c9b-8989-f2a1722a50cc/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
{UpstreamID:7365351255778119562 Namespace:default ID:2868b7f0-dbef-4e35-906e-8959d8eb4fcc SinkURI:kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:24:48.154234948 +0800 CST StartTs:449545368507514884 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0017b5950 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545368533729285}
{CheckpointTs:449545370735476741 MinTableBarrierTs:449545370735476741 AdminJobType:noop}
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545370997882884, checkpointTs: 449545370735476741, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/363ed1ef-638f-4c9b-8989-f2a1722a50cc
	{"id":"363ed1ef-638f-4c9b-8989-f2a1722a50cc","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879494}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c81b1f62
	363ed1ef-638f-4c9b-8989-f2a1722a50cc

/tidb/cdc/default/default/changefeed/info/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"upstream-id":7365351255778119562,"namespace":"default","changefeed-id":"2868b7f0-dbef-4e35-906e-8959d8eb4fcc","sink-uri":"kafka://127.0.0.1:9092/ticdc-changefeed-reconstruct-31505?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:24:48.154234948+08:00","start-ts":449545368507514884,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545368533729285}

/tidb/cdc/default/default/changefeed/status/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":449545370735476741,"min-table-barrier-ts":449545370735476741,"admin-job-type":0}

/tidb/cdc/default/default/task/position/363ed1ef-638f-4c9b-8989-f2a1722a50cc/2868b7f0-dbef-4e35-906e-8959d8eb4fcc
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warnin+ grep -q 'etcd info'
g":null}

/tidb/cdc/default/default/upstream/7365351255778119562
	{"id":7365351255778119562,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
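The trace above is the test harness probing a freshly started cdc server: it curls /debug/info, retries while the response contains "failed to get info:", and stops once the response contains "etcd info" (the same /tidb/cdc/... keys dumped above, which could also be read directly with etcdctl get --prefix /tidb/cdc/). A minimal stand-alone sketch of that polling pattern follows; the curl command and the 50 x 3s budget are taken from the trace, while the function name wait_cdc_ready is made up for illustration.

#!/usr/bin/env bash
# Hedged sketch of the readiness loop traced above (helper name is hypothetical).
wait_cdc_ready() {
    local url="http://127.0.0.1:8300/debug/info"
    for i in $(seq 1 50); do
        # -v output goes to stderr, so merge streams like the traced command does.
        res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>&1)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc returned an error, retrying"
        elif echo "$res" | grep -q 'etcd info'; then
            echo "cdc server is ready"
            return 0
        fi
        sleep 3
    done
    echo "cdc server did not become ready" >&2
    return 1
}
wait_cdc_ready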
cdc.test cli capture list --pd=http://127.0.0.1:2379 2>&1 | grep id
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b221a380004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:4928, start at 2024-05-05 11:24:57.872322387 +0800 CST m=+5.201336174	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:57.881 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:57.870 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:57.870 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
    "id": "363ed1ef-638f-4c9b-8989-f2a1722a50cc",
    "cluster-id": "default"
run task successfully
table ddl_manager.finish_mark not exists for 50-th check, retry later
table test.finish_mark not exists for 11-th check, retry later
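The repeated "table X not exists for N-th check, retry later" lines come from a polling helper that asks the downstream TiDB whether a marker table has been replicated yet. A hedged sketch of that pattern is below; the host, port, user, retry count, and sleep interval are placeholders, and the real helper in the tiflow test framework may differ in detail.

#!/usr/bin/env bash
# Hedged sketch: poll TiDB until test.finish_mark appears (connection details assumed).
table_exists() {
    mysql -h 127.0.0.1 -P 4000 -u root -e \
        "SELECT 1 FROM information_schema.tables WHERE table_schema='test' AND table_name='finish_mark'" \
        | grep -q 1
}
for i in $(seq 1 60); do
    if table_exists; then
        echo "table test.finish_mark exists"
        break
    fi
    echo "table test.finish_mark not exists for ${i}-th check, retry later"
    sleep 2
done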
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
capture_id: 363ed1ef-638f-4c9b-8989-f2a1722a50cc
check_processor_table_count http://127.0.0.1:2379 2868b7f0-dbef-4e35-906e-8959d8eb4fcc 363ed1ef-638f-4c9b-8989-f2a1722a50cc 1
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
run task successfully
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b221a380004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:4928, start at 2024-05-05 11:24:57.872322387 +0800 CST m=+5.201336174	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:57.881 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:57.870 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:57.870 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b221c800006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:5014, start at 2024-05-05 11:24:58.02415163 +0800 CST m=+5.295891202	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:26:58.030 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:24:58.016 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:14:58.016 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_pause_resume/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_manager.finish_mark not exists for 51-th check, retry later
check_processor_table_count http://127.0.0.1:2379 2868b7f0-dbef-4e35-906e-8959d8eb4fcc 363ed1ef-638f-4c9b-8989-f2a1722a50cc 0
table test.finish_mark not exists for 12-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2246080018	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:15580, start at 2024-05-05 11:25:00.707324948 +0800 CST m=+5.155771040	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:00.714 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:00.674 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:00.674 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2246080018	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:15580, start at 2024-05-05 11:25:00.707324948 +0800 CST m=+5.155771040	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:00.714 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:00.674 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:00.674 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2246bc0017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:15668, start at 2024-05-05 11:25:00.753955065 +0800 CST m=+5.150362213	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:00.760 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:00.719 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:00.719 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
table count 1 does not equal expected count 0
run task failed 1-th time, retry later
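The check_processor_table_count calls above verify how many tables a given capture's processor currently owns, retrying until the count matches. A hedged sketch of what such a check does is shown next; the cdc cli processor query flags follow the TiCDC CLI documentation, while the jq path into the processor status is an assumption and the real helper may parse the output differently.

#!/usr/bin/env bash
# Hedged sketch of check_processor_table_count <pd> <changefeed-id> <capture-id> <expected>.
check_processor_table_count() {
    local pd=$1 changefeed=$2 capture=$3 expected=$4
    local count
    # jq path is an assumption about the processor query output shape.
    count=$(cdc cli processor query --pd="$pd" \
        --changefeed-id="$changefeed" --capture-id="$capture" 2>&1 |
        jq '.status.tables | length')
    if [[ "$count" -ne "$expected" ]]; then
        echo "table count $count does not equal expected count $expected"
        return 1
    fi
}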
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Sun May  5 11:25:02 CST 2024] <<<<<< START cdc server in changefeed_pause_resume case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_pause_resume.63556357.out server --log-file /tmp/tidb_cdc_test/changefeed_pause_resume/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_pause_resume/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table ddl_manager.finish_mark not exists for 52-th check, retry later
table test.finish_mark not exists for 13-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/cdc
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
check_processor_table_count http://127.0.0.1:2379 2868b7f0-dbef-4e35-906e-8959d8eb4fcc 363ed1ef-638f-4c9b-8989-f2a1722a50cc 0
run task successfully
[Sun May  5 11:25:03 CST 2024] <<<<<< START cdc server in kafka_column_selector case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.1708917091.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 53-th check, retry later
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 19.43 secs (191623815 bytes/sec)
[Pipeline] {
[Pipeline] cache
***************** properties *****************
"mysql.user"="root"
"dotransactions"="false"
"operationcount"="0"
"scanproportion"="0"
"readallfields"="true"
"insertproportion"="0"
"workload"="core"
"readproportion"="0"
"requestdistribution"="uniform"
"mysql.host"="127.0.0.1"
"mysql.db"="changefeed_reconstruct"
"recordcount"="50"
"mysql.port"="4000"
"updateproportion"="0"
"threadcount"="4"
**********************************************
Run finished, takes 24.103633ms
INSERT - Takes(s): 0.0, Count: 48, OPS: 3332.4, Avg(us): 1950, Min(us): 947, Max(us): 9699, 95th(us): 10000, 99th(us): 10000
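The properties dump and the "Run finished" summary above are produced by a YCSB-style loader seeding the changefeed_reconstruct workload. A hedged sketch of an equivalent invocation is below, assuming the pingcap/go-ycsb binary; the workload file path is a placeholder, and the property names simply mirror the dump above.

#!/usr/bin/env bash
# Hedged sketch (not from this log): load the same workload with go-ycsb.
go-ycsb load mysql -P workloads/core \
    -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
    -p mysql.db=changefeed_reconstruct \
    -p recordcount=50 -p threadcount=4 -p workload=core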
table changefeed_reconstruct.usertable not exists for 1-th check, retry later
table sink_retry.finish_mark_2 exists
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:25:04 CST 2024] <<<<<< run test case sink_retry success! >>>>>>
table test.finish_mark exists
check diff successfully
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:06 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4bf7f788-f3a7-44e8-9955-d09791173643
	{"id":"4bf7f788-f3a7-44e8-9955-d09791173643","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879503}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8580ed4
	4bf7f788-f3a7-44e8-9955-d09791173643

/tidb/cdc/default/default/upstream/7365351311675050020
	{"id":7365351311675050020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4bf7f788-f3a7-44e8-9955-d09791173643
	{"id":"4bf7f788-f3a7-44e8-9955-d09791173643","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879503}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8580ed4
	4bf7f788-f3a7-44e8-9955-d09791173643

/tidb/cdc/default/default/upstream/7365351311675050020
	{"id":7365351311675050020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/4bf7f788-f3a7-44e8-9955-d09791173643
	{"id":"4bf7f788-f3a7-44e8-9955-d09791173643","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879503}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8580ed4
	4bf7f788-f3a7-44e8-9955-d09791173643

/tidb/cdc/default/default/upstream/7365351311675050020
	{"id":7365351311675050020,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:25:06 CST 2024] <<<<<< START kafka consumer in changefeed_pause_resume case >>>>>>
table changefeed_pause_resume.t1 not exists for 1-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2296a80004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:10070, start at 2024-05-05 11:25:05.839927447 +0800 CST m=+5.361304005	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:05.847 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:05.834 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:05.834 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 54-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:07 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/42cef0a4-92ff-4b28-975a-5544597aeb5e
	{"id":"42cef0a4-92ff-4b28-975a-5544597aeb5e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879504}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8658acb
	42cef0a4-92ff-4b28-975a-5544597aeb5e

/tidb/cdc/default/default/upstream/7365351329034020626
	{"id":7365351329034020626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/42cef0a4-92ff-4b28-975a-5544597aeb5e
	{"id":"42cef0a4-92ff-4b28-975a-5544597aeb5e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879504}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8658acb
	42cef0a4-92ff-4b28-975a-5544597aeb5e

/tidb/cdc/default/default/upstream/7365351329034020626
	{"id":7365351329034020626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/42cef0a4-92ff-4b28-975a-5544597aeb5e
	{"id":"42cef0a4-92ff-4b28-975a-5544597aeb5e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879504}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8658acb
	42cef0a4-92ff-4b28-975a-5544597aeb5e

/tidb/cdc/default/default/upstream/7365351329034020626
	{"id":7365351329034020626,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector.cli.17152.out cli changefeed create --start-ts=449545372676653057 '--sink-uri=kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json&partition-num=1&enable-tidb-extension=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/conf/changefeed.toml
Create changefeed successfully!
ID: test
Info: {"upstream_id":7365351329034020626,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-test?protocol=canal-json\u0026partition-num=1\u0026enable-tidb-extension=true","create_time":"2024-05-05T11:25:07.496528465+08:00","start_ts":449545372676653057,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.t1"],"columns":["a","b"]},{"matcher":["test.*"],"columns":["*","!b"]},{"matcher":["test1.t1"],"columns":["column*","!column1"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545372676653057,"checkpoint_ts":449545372676653057,"checkpoint_time":"2024-05-05 11:25:03.924"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
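After a changefeed such as the column-selector one above is created, its registration (including the column_selectors rules echoed in the Info line) can be read back through the same CLI. The commands below are a hedged follow-up sketch rather than part of this run; the flag spellings follow the TiCDC cli as used elsewhere in this log but are not verified against this exact build.

#!/usr/bin/env bash
# Hedged sketch: confirm the changefeed named "test" was registered.
cdc cli changefeed query --pd=http://127.0.0.1:2379 --changefeed-id=test
cdc cli changefeed list --pd=http://127.0.0.1:2379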
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table changefeed_reconstruct.usertable exists
check diff failed 1-th time, retry later
table changefeed_pause_resume.t1 exists
table changefeed_pause_resume.t2 not exists for 1-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2296a80004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:10070, start at 2024-05-05 11:25:05.839927447 +0800 CST m=+5.361304005	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:05.847 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:05.834 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:05.834 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2297680015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3cmhg-gg08m, pid:10155, start at 2024-05-05 11:25:05.918714176 +0800 CST m=+5.374815226	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:05.925 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:05.932 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:05.932 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/partition_table/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/partition_table/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/partition_table/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_manager.finish_mark not exists for 55-th check, retry later
+ set +x
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting build checksum checker...
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading google.golang.org/grpc v1.62.1
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading golang.org/x/net v0.24.0
go: downloading github.com/spf13/cobra v1.8.0
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading github.com/go-sql-driver/mysql v1.7.1
go: downloading go.uber.org/zap v1.27.0
go: downloading github.com/tinylib/msgp v1.1.6
go: downloading github.com/apache/pulsar-client-go v0.11.0
go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading github.com/IBM/sarama v1.41.2
go: downloading github.com/gin-gonic/gin v1.9.1
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/pierrec/lz4/v4 v4.1.18
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/xdg/scram v1.0.5
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/KimMachineGun/automemlimit v0.2.4
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2
go: downloading github.com/prometheus/client_golang v1.19.0
go: downloading github.com/stretchr/testify v1.9.0
go: downloading golang.org/x/time v0.5.0
go: downloading github.com/containerd/cgroups v1.0.4
go: downloading github.com/xdg/stringprep v1.0.3
go: downloading golang.org/x/crypto v0.22.0
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading github.com/stretchr/objx v0.5.2
go: downloading golang.org/x/text v0.14.0
go: downloading github.com/philhofer/fwd v1.1.1
go: downloading gopkg.in/yaml.v3 v3.0.1
go: downloading github.com/spf13/pflag v1.0.5
go: downloading github.com/gin-contrib/sse v0.1.0
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/pelletier/go-toml/v2 v2.0.8
go: downloading github.com/ugorji/go/codec v1.2.11
go: downloading google.golang.org/protobuf v1.33.0
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
go: downloading github.com/go-playground/validator/v10 v10.14.0
go: downloading github.com/cilium/ebpf v0.4.0
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/godbus/dbus/v5 v5.0.4
go: downloading github.com/opencontainers/runtime-spec v1.0.2
go: downloading github.com/sirupsen/logrus v1.9.3
go: downloading cloud.google.com/go v0.112.2
go: downloading golang.org/x/sys v0.19.0
go: downloading github.com/docker/go-units v0.5.0
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
table test.finish_mark not exists for 1-th check, retry later
check diff successfully
go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2
go: downloading github.com/aws/smithy-go v1.13.5
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading github.com/gabriel-vasile/mimetype v1.4.2
go: downloading github.com/go-playground/universal-translator v0.18.1
go: downloading github.com/leodido/go-urn v1.2.4
go: downloading github.com/golang/protobuf v1.5.4
go: downloading github.com/bits-and-blooms/bitset v1.4.0
go: downloading github.com/linkedin/goavro/v2 v2.11.1
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/eapache/go-resiliency v1.4.0
go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3
go: downloading github.com/eapache/queue v1.1.0
go: downloading github.com/hashicorp/go-multierror v1.1.1
go: downloading github.com/jcmturner/gofork v1.7.6
go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4
wait process cdc.test exit for 1-th time...
go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475
go: downloading github.com/AthenZ/athenz v1.10.39
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading golang.org/x/mod v0.17.0
go: downloading github.com/spaolacci/murmur3 v1.1.0
go: downloading github.com/DataDog/zstd v1.5.5
go: downloading github.com/pierrec/lz4 v2.6.1+incompatible
go: downloading github.com/go-playground/locales v0.14.1
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/google/uuid v1.6.0
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading google.golang.org/api v0.170.0
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/hashicorp/errwrap v1.0.0
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0
go: downloading github.com/99designs/keyring v1.2.1
go: downloading github.com/hashicorp/go-uuid v1.0.3
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/jcmturner/rpc/v2 v2.0.3
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading github.com/dvsekhvalnov/jose2go v1.5.0
go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c
go: downloading github.com/mtibben/percent v0.2.1
go: downloading golang.org/x/term v0.19.0
go: downloading github.com/jcmturner/aescts/v2 v2.0.0
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
wait process cdc.test exit for 2-th time...
go: downloading github.com/go-logr/logr v1.4.1
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/go-logr/stdr v1.2.2
table changefeed_pause_resume.t2 exists
table changefeed_pause_resume.t3 not exists for 1-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11546.out cli tso query --pd=http://127.0.0.1:2379
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:25:11 CST 2024] <<<<<< run test case changefeed_reconstruct success! >>>>>>
table ddl_manager.finish_mark not exists for 56-th check, retry later
go: downloading github.com/ardielle/ardielle-go v1.5.2
table test.finish_mark not exists for 2-th check, retry later
+ set +x
+ tso='449545374566711297
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545374566711297 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
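The tso query trace above captures a start timestamp from PD and keeps only the first whitespace-separated field (the coverage lines printed by cdc.test are discarded); that value is what later feeds --start-ts on changefeed create, as seen earlier in this log. A hedged sketch of the same two steps, with a placeholder sink URI:

#!/usr/bin/env bash
# Hedged sketch: fetch a start-ts and create a changefeed with it (sink URI is a placeholder).
start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' '{print $1}' | head -n 1)
cdc cli changefeed create --pd=http://127.0.0.1:2379 \
    --start-ts="$start_ts" \
    --sink-uri="kafka://127.0.0.1:9092/placeholder-topic?protocol=open-protocol"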
[Sun May  5 11:25:12 CST 2024] <<<<<< START cdc server in partition_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.1159211594.out server --log-file /tmp/tidb_cdc_test/partition_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/partition_table/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table changefeed_pause_resume.t3 exists
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/google/btree v1.1.2
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/kr/pretty v0.3.1
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading github.com/kr/text v0.2.0
table ddl_manager.finish_mark not exists for 57-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
go: downloading github.com/jmespath/go-jmespath v0.4.0
table test.finish_mark not exists for 3-th check, retry later
go: downloading github.com/google/s2a-go v0.1.7
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
check diff failed 1-th time, retry later
table ddl_manager.finish_mark not exists for 58-th check, retry later
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 7.67 secs (485198982 bytes/sec)
[Pipeline] {
[Pipeline] cache
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:15 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c
	{"id":"a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879512}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c878bad6
	a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c

/tidb/cdc/default/default/upstream/7365351353687629454
	{"id":7365351353687629454,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c
	{"id":"a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879512}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c878bad6
	a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c

/tidb/cdc/default/default/upstream/7365351353687629454
	{"id":7365351353687629454,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c
	{"id":"a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879512}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c878bad6
	a1cadd7d-f7df-47e9-8a7f-f9e5a81b3b1c

/tidb/cdc/default/default/upstream/7365351353687629454
	{"id":7365351353687629454,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
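The two curl passes above follow the usual readiness pattern: hit the capture's /debug/info endpoint with HTTP basic auth, tolerate "connection refused" while the server is still starting, and stop once the response contains the "etcd info" section. A rough sketch of that loop (the exact helper in the test utilities may differ in details):

    # Poll the capture's /debug/info endpoint until it reports etcd info;
    # give up after 50 attempts, 3 seconds apart.
    i=0
    while [ "$i" -le 50 ]; do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        echo "$res" | grep -q 'etcd info' && break
        [ "$i" -eq 50 ] && { echo 'cdc server not ready'; exit 1; }
        sleep 3
        i=$((i + 1))
    done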
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.partition_table.cli.11655.out cli changefeed create --start-ts=449545374566711297 '--sink-uri=kafka://127.0.0.1:9092/ticdc-partition-table-test-11796?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: e309882a-2289-4e84-ae10-3b5147255bed
Info: {"upstream_id":7365351353687629454,"namespace":"default","id":"e309882a-2289-4e84-ae10-3b5147255bed","sink_uri":"kafka://127.0.0.1:9092/ticdc-partition-table-test-11796?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:25:16.162555804+08:00","start_ts":449545374566711297,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545374566711297,"checkpoint_ts":449545374566711297,"checkpoint_time":"2024-05-05 11:25:11.134"}
PASS
table test.finish_mark not exists for 4-th check, retry later
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
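With the start TSO and a ready server, the changefeed is created through the cdc cli, pointing the sink at the local Kafka broker with the open-protocol encoder. A minimal sketch of the invocation seen above, with TOPIC and START_TS as placeholders for the per-case values:

    # Create a changefeed that replicates into a Kafka topic via open-protocol.
    cdc.test cli changefeed create \
        --start-ts="${START_TS}" \
        --sink-uri="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"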
check diff failed 2-th time, retry later
table ddl_manager.finish_mark not exists for 59-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2320fc0019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:4511, start at 2024-05-05 11:25:14.725012266 +0800 CST m=+5.393707185	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:14.733 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:14.737 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:14.737 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2320fc0019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:4511, start at 2024-05-05 11:25:14.725012266 +0800 CST m=+5.393707185	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:14.733 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:14.737 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:14.737 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2321a80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:4580, start at 2024-05-05 11:25:14.731703698 +0800 CST m=+5.349165566	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:14.738 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:14.730 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:14.730 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/cdc/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/cdc/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ set +x
[Sun May  5 11:25:17 CST 2024] <<<<<< START kafka consumer in partition_table case >>>>>>
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_error/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
table test.finish_mark not exists for 5-th check, retry later
check diff failed 3-th time, retry later
table ddl_manager.finish_mark not exists for 60-th check, retry later
table test.finish_mark not exists for 6-th check, retry later
[Sun May  5 11:25:19 CST 2024] <<<<<< START cdc server in cdc case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.59175919.out server --log-file /tmp/tidb_cdc_test/cdc/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
check diff failed 4-th time, retry later
table ddl_manager.finish_mark not exists for 61-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/changefeed_error
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish_mark not exists for 7-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:22 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/63fc743d-0dc4-4dc4-84a6-7cd39c282ce2
	{"id":"63fc743d-0dc4-4dc4-84a6-7cd39c282ce2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879520}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c89d72ce
	63fc743d-0dc4-4dc4-84a6-7cd39c282ce2

/tidb/cdc/default/default/upstream/7365351398355040733
	{"id":7365351398355040733,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/63fc743d-0dc4-4dc4-84a6-7cd39c282ce2
	{"id":"63fc743d-0dc4-4dc4-84a6-7cd39c282ce2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879520}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c89d72ce
	63fc743d-0dc4-4dc4-84a6-7cd39c282ce2

/tidb/cdc/default/default/upstream/7365351398355040733
	{"id":7365351398355040733,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/63fc743d-0dc4-4dc4-84a6-7cd39c282ce2
	{"id":"63fc743d-0dc4-4dc4-84a6-7cd39c282ce2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879520}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c89d72ce
	63fc743d-0dc4-4dc4-84a6-7cd39c282ce2

/tidb/cdc/default/default/upstream/7365351398355040733
	{"id":7365351398355040733,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc.cli.5982.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cdc-test-1820?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/cdc/pulsar_test.toml
check diff failed 5-th time, retry later
Create changefeed successfully!
ID: a565d8ac-fa7a-4903-9fe0-4125065012e8
Info: {"upstream_id":7365351398355040733,"namespace":"default","id":"a565d8ac-fa7a-4903-9fe0-4125065012e8","sink_uri":"kafka://127.0.0.1:9092/ticdc-cdc-test-1820?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:25:23.227980544+08:00","start_ts":449545377699856388,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545377699856388,"checkpoint_ts":449545377699856388,"checkpoint_time":"2024-05-05 11:25:23.086"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table ddl_manager.finish_mark not exists for 62-th check, retry later
table test.finish_mark not exists for 8-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ set +x
[Sun May  5 11:25:24 CST 2024] <<<<<< START kafka consumer in cdc case >>>>>>
go: downloading github.com/go-sql-driver/mysql v1.7.1
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading go.uber.org/zap v1.27.0
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading go.uber.org/atomic v1.11.0
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading google.golang.org/grpc v1.62.1
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading golang.org/x/net v0.24.0
go: downloading github.com/golang/protobuf v1.5.4
go: downloading golang.org/x/sys v0.19.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading google.golang.org/protobuf v1.33.0
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
check diff successfully
go: downloading golang.org/x/text v0.14.0
table ddl_manager.finish_mark not exists for 63-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_capture/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
table test.finish_mark exists
check diff successfully
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
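The ERROR 2003 lines are expected while TiDB is still coming up: the harness keeps probing the MySQL port until a query succeeds, then dumps the mysql.tidb bootstrap/GC variables shown elsewhere in this log. A minimal sketch of that wait, assuming the mysql client and TiDB's default port 4000:

    # Retry a trivial query until the freshly started TiDB accepts connections.
    # (4000 is the default TiDB port; the harness may use a different one.)
    while ! mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT 1' >/dev/null 2>&1; do
        sleep 1
    done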
wait process cdc.test exit for 1-th time...
check diff failed 1-th time, retry later
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/prometheus/client_golang v1.19.0
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/spf13/pflag v1.0.5
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading github.com/docker/go-units v0.5.0
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading github.com/google/uuid v1.6.0
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/stretchr/testify v1.9.0
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading github.com/scalalang2/golang-fifo v0.1.5
go: downloading github.com/tidwall/btree v1.7.0
go: downloading github.com/gorilla/mux v1.8.0
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading cloud.google.com/go v0.112.2
go: downloading google.golang.org/api v0.170.0
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading github.com/google/btree v1.1.2
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading go.uber.org/mock v0.4.0
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/cockroachdb/pebble v1.1.0
go: downloading github.com/jfcg/sorty/v2 v2.1.0
go: downloading golang.org/x/time v0.5.0
go: downloading github.com/carlmjohnson/flagext v0.21.0
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/dgraph-io/ristretto v0.1.1
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading gopkg.in/yaml.v3 v3.0.1
table ddl_manager.finish_mark not exists for 64-th check, retry later
wait process cdc.test exit for 2-th time...
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading golang.org/x/crypto v0.22.0
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df
go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
go: downloading github.com/spkg/bom v1.0.0
go: downloading github.com/xitongsys/parquet-go v1.6.0
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading github.com/kr/pretty v0.3.1
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/robfig/cron v1.2.0
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
go: downloading github.com/ncw/directio v1.0.5
go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2
go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64
go: downloading github.com/klauspost/cpuid v1.3.1
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
wait process cdc.test exit for 3-th time...
go: downloading github.com/go-logr/logr v1.4.1
go: downloading github.com/go-logr/stdr v1.2.2
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/DataDog/zstd v1.5.5
go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:25:28 CST 2024] <<<<<< run test case kafka_simple_basic success! >>>>>>
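The shutdown messages above come from the wait-for-exit step: after stopping the capture, the script polls until no cdc.test process remains before declaring the case finished. A rough sketch of that pattern, assuming pgrep is available (the actual helper may use ps or killall instead):

    # Poll until no cdc.test process is left.
    for i in $(seq 1 10); do
        if ! pgrep -x cdc.test >/dev/null 2>&1; then
            echo 'process cdc.test already exit'
            break
        fi
        echo "wait process cdc.test exit for ${i}-th time..."
        sleep 1
    done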
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 2-th time, retry later
table ddl_manager.finish_mark not exists for 65-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/multi_capture
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_manager.finish_mark not exists for 66-th check, retry later
Cache restored successfully (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc)
3723625472 bytes in 13.30 secs (279928271 bytes/sec)
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] container
check diff failed 3-th time, retry later
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
go: downloading github.com/jmespath/go-jmespath v0.4.0
go: downloading github.com/google/s2a-go v0.1.7
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 6 min 0 sec
[Pipeline] {
[Pipeline] sh
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
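The readiness probe traced above boils down to three checks: ZooKeeper and Kafka must accept TCP connections, and broker id 1 must have registered itself under /brokers/ids before the consumers start. Roughly:

    # Check the listeners, then ask zookeeper (4-letter "dump" command)
    # whether /brokers/ids/1 is registered.
    nc -z localhost 2181
    nc -z localhost 9092
    echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1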
[Pipeline] sh
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
table partition_table.t exists
table partition_table.t1 exists
table partition_table.t2 not exists for 1-th check, retry later
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
[Pipeline] sh
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
[Pipeline] sh
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b242998000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:4197, start at 2024-05-05 11:25:31.634004886 +0800 CST m=+5.063916342	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:31.639 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:31.622 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:31.622 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b242998000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:4197, start at 2024-05-05 11:25:31.634004886 +0800 CST m=+5.063916342	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:31.639 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:31.622 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:31.622 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b242b140009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:4277, start at 2024-05-05 11:25:31.724286062 +0800 CST m=+5.099321575	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:31.733 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:31.717 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:31.717 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/changefeed_error/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_error/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
+ sleep 10
[Pipeline] sh
check diff failed 4-th time, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
table ddl_manager.finish_mark not exists for 67-th check, retry later
+ echo Waiting for zookeeper to be ready...
Waiting for zookeeper to be ready...
+ nc -z localhost 2181
+ echo Waiting for kafka to be ready...
Waiting for kafka to be ready...
+ nc -z localhost 9092
+ echo Waiting for kafka-broker to be ready...
Waiting for kafka-broker to be ready...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk {$1=$1;print}
+ grep -F -w /brokers/ids/1
/brokers/ids/1
table partition_table.t2 exists
table partition_table.finish_mark not exists for 1-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5728.out cli tso query --pd=http://127.0.0.1:2379
Starting Upstream TiDB...
[2024/05/05 11:25:31.808 +08:00] [INFO] [main.go:99] ["running ddl test: 1 modifyColumnDefaultValueDDL2"]
[2024/05/05 11:25:31.808 +08:00] [INFO] [main.go:99] ["running ddl test: 0 modifyColumnDefaultValueDDL1"]
[2024/05/05 11:25:31.899 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs162b76d4_114d_472d_9764_de7fdc7e866f"]
[2024/05/05 11:25:31.999 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs504e1c6b_2058_41a4_85f9_c4d2283775db"]
[2024/05/05 11:25:32.045 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs90ac7db5_6b85_4f41_a3b3_e238dce5e091"]
[2024/05/05 11:25:32.071 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsae349078_fe40_4577_9bf0_3cee70cd3540"]
[2024/05/05 11:25:32.119 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsdd85b2d2_0d5e_4779_846a_8c7910f4d32a"]
[2024/05/05 11:25:32.154 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsec0222f3_f0a8_4c66_a1fe_02300c0556e9"]
[2024/05/05 11:25:32.221 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.224 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.334 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsbb80fb7f_a19b_4ab4_be9b_00ca713c031c"]
[2024/05/05 11:25:32.337 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs04f92b72_b99e_44d9_a1f3_f7631c0438f7"]
[2024/05/05 11:25:32.417 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.417 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.512 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.516 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.545 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.599 +08:00] [INFO] [main.go:178] ["1 insert success: 100"]
[2024/05/05 11:25:32.605 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.608 +08:00] [INFO] [main.go:178] ["1 insert success: 100"]
[2024/05/05 11:25:32.647 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.654 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.735 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.737 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.822 +08:00] [INFO] [main.go:178] ["0 insert success: 100"]
[2024/05/05 11:25:32.825 +08:00] [INFO] [main.go:178] ["0 insert success: 100"]
[2024/05/05 11:25:32.826 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:32.832 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:32.925 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.925 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.926 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:32.932 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:32.949 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:32.952 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.032 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.036 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.133 +08:00] [INFO] [main.go:178] ["1 insert success: 200"]
[2024/05/05 11:25:33.137 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.143 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.143 +08:00] [INFO] [main.go:178] ["1 insert success: 200"]
[2024/05/05 11:25:33.235 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.242 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.313 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.318 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.424 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.425 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.530 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.531 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.534 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:33.535 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:33.600 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.602 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.623 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.632 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.638 +08:00] [INFO] [main.go:178] ["0 insert success: 200"]
[2024/05/05 11:25:33.638 +08:00] [INFO] [main.go:178] ["0 insert success: 200"]
[2024/05/05 11:25:33.643 +08:00] [INFO] [main.go:199] ["0 delete success: 100"]
[2024/05/05 11:25:33.644 +08:00] [INFO] [main.go:199] ["0 delete success: 100"]
[2024/05/05 11:25:33.709 +08:00] [INFO] [main.go:178] ["1 insert success: 300"]
[2024/05/05 11:25:33.717 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.721 +08:00] [INFO] [main.go:178] ["1 insert success: 300"]
[2024/05/05 11:25:33.722 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.814 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.824 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.846 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:33.900 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:33.946 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.018 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.107 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:34.114 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:34.120 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:34.124 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:34.139 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.203 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.209 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.245 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.302 +08:00] [INFO] [main.go:178] ["1 insert success: 400"]
[2024/05/05 11:25:34.317 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.318 +08:00] [INFO] [main.go:178] ["1 insert success: 400"]
[2024/05/05 11:25:34.332 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.416 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.431 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.443 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.507 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.507 +08:00] [INFO] [main.go:178] ["0 insert success: 300"]
[2024/05/05 11:25:34.508 +08:00] [INFO] [main.go:178] ["0 insert success: 300"]
[2024/05/05 11:25:34.550 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:34.612 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:34.709 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.715 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.717 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:34.728 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:34.800 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:34.801 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:34.822 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:34.840 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:34.915 +08:00] [INFO] [main.go:178] ["1 insert success: 500"]
[2024/05/05 11:25:34.929 +08:00] [INFO] [main.go:178] ["1 insert success: 500"]
[2024/05/05 11:25:34.932 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:34.940 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:35.031 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:35.038 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:35.106 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:35.125 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:25:35.223 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.233 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.324 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:35.325 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:35.330 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:35.336 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:35.347 +08:00] [INFO] [main.go:178] ["0 insert success: 400"]
[2024/05/05 11:25:35.402 +08:00] [INFO] [main.go:178] ["0 insert success: 400"]
[2024/05/05 11:25:35.405 +08:00] [INFO] [main.go:199] ["0 delete success: 200"]
[2024/05/05 11:25:35.407 +08:00] [INFO] [main.go:199] ["0 delete success: 200"]
[2024/05/05 11:25:35.410 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.413 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.433 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.450 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
table ddl_manager.finish_mark not exists for 68-th check, retry later
[2024/05/05 11:25:35.514 +08:00] [INFO] [main.go:178] ["1 insert success: 600"]
[2024/05/05 11:25:35.526 +08:00] [INFO] [main.go:178] ["1 insert success: 600"]
[2024/05/05 11:25:35.530 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.541 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.625 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.629 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.644 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.711 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
table partition_table.finish_mark not exists for 2-th check, retry later
check diff failed 5-th time, retry later
[2024/05/05 11:25:35.809 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:35.811 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:35.908 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.909 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.910 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:35.915 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:35.945 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.001 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.024 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.042 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.114 +08:00] [INFO] [main.go:178] ["1 insert success: 700"]
[2024/05/05 11:25:36.134 +08:00] [INFO] [main.go:178] ["1 insert success: 700"]
[2024/05/05 11:25:36.135 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.199 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.211 +08:00] [INFO] [main.go:178] ["0 insert success: 500"]
[2024/05/05 11:25:36.218 +08:00] [INFO] [main.go:178] ["0 insert success: 500"]
[2024/05/05 11:25:36.238 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.243 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.300 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.314 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.404 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:36.413 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:36.509 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.512 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.522 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:36.522 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:36.601 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:36.605 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:36.627 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:36.647 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:36.650 +08:00] [INFO] [main.go:178] ["1 insert success: 800"]
[2024/05/05 11:25:36.729 +08:00] [INFO] [main.go:178] ["1 insert success: 800"]
[2024/05/05 11:25:36.737 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
+ set +x
+ tso='449545380868128769
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545380868128769 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
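The trace above captures the upstream TSO that will later be used as the changefeed start-ts: cdc cli tso query prints the TSO followed by the coverage-instrumented binary's PASS/coverage lines, and awk keeps only the first field. A minimal sketch of that extraction, with variable names chosen for illustration rather than taken from the script:

    # query PD for a TSO; the test binary appends PASS/coverage lines to its output
    tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
    # unquoted echo flattens the output onto one line; awk keeps the first field only
    start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')   # e.g. 449545380868128769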
[2024/05/05 11:25:36.809 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:36.904 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:36.906 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:36.925 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:36.937 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:25:37.023 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.031 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.047 +08:00] [INFO] [main.go:178] ["0 insert success: 600"]
[2024/05/05 11:25:37.048 +08:00] [INFO] [main.go:178] ["0 insert success: 600"]
[2024/05/05 11:25:37.101 +08:00] [INFO] [main.go:199] ["0 delete success: 300"]
[2024/05/05 11:25:37.104 +08:00] [INFO] [main.go:199] ["0 delete success: 300"]
[2024/05/05 11:25:37.131 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:37.135 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:37.135 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:37.142 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:37.226 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.227 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.248 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.316 +08:00] [INFO] [main.go:178] ["1 insert success: 900"]
[2024/05/05 11:25:37.323 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.342 +08:00] [INFO] [main.go:178] ["1 insert success: 900"]
[2024/05/05 11:25:37.349 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.416 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
table ddl_manager.finish_mark not exists for 69-th check, retry later
[2024/05/05 11:25:37.515 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.518 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.522 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.541 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.642 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:37.643 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:37.735 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:37.739 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.744 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:37.745 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
check diff failed 6-th time, retry later
***************** properties *****************
"updateproportion"="0"
"dotransactions"="false"
"workload"="core"
"mysql.port"="4000"
"operationcount"="0"
"readproportion"="0"
"scanproportion"="0"
"mysql.user"="root"
"readallfields"="true"
"insertproportion"="0"
"mysql.db"="changefeed_error"
"threadcount"="4"
"recordcount"="20"
"mysql.host"="127.0.0.1"
"requestdistribution"="uniform"
**********************************************
Run finished, takes 8.389428ms
INSERT - Takes(s): 0.0, Count: 19, OPS: 4058.4, Avg(us): 1386, Min(us): 865, Max(us): 3591, 95th(us): 4000, 99th(us): 4000
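The properties block above is the go-ycsb workload used to pre-load changefeed_error.usertable before the changefeed is created. A sketch of how such a property set is typically fed to go-ycsb; the property-file path is an assumption, and the -p overrides simply mirror the values printed above:

    # load 20 rows into changefeed_error with 4 client threads (sketch)
    go-ycsb load mysql -P ./conf/workload \
        -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
        -p mysql.db=changefeed_error -p recordcount=20 -p threadcount=4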
[Sun May  5 11:25:37 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedNoRetryError=1*return(true)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.58015803.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
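The lines above start the cdc server under a failpoint (NewChangefeedNoRetryError=1*return(true), fired once, which later drives the changefeed into the failed state checked further down) and begin the standard readiness poll: hit /debug/info with basic auth up to 50 times and stop once the response contains 'etcd info' rather than 'failed to get info:'. A condensed sketch of that loop, assuming the endpoint and credentials shown in the trace:

    for i in $(seq 1 50); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        if echo "$res" | grep -q 'etcd info'; then
            break        # server is serving and registered in etcd
        fi
        sleep 3          # connection refused or incomplete info; retry
    done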
[2024/05/05 11:25:37.838 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:37.839 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:37.945 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:37.965 +08:00] [INFO] [main.go:178] ["0 insert success: 700"]
[2024/05/05 11:25:37.966 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"]
[2024/05/05 11:25:37.966 +08:00] [INFO] [main.go:178] ["0 insert success: 700"]
[2024/05/05 11:25:37.973 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:37.987 +08:00] [INFO] [main.go:178] ["1 insert success: 1000"]
[2024/05/05 11:25:38.007 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:38.016 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:38.055 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:38.106 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:38.111 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:38.121 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:38.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.237 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
table partition_table.finish_mark not exists for 3-th check, retry later
[2024/05/05 11:25:38.326 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:38.340 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:38.348 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.348 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:38.400 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:38.407 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:38.619 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:38.626 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.629 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"]
[2024/05/05 11:25:38.630 +08:00] [INFO] [main.go:178] ["1 insert success: 1100"]
[2024/05/05 11:25:38.633 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.646 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:38.714 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.735 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:38.740 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:38.742 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:38.839 +08:00] [INFO] [main.go:178] ["0 insert success: 800"]
[2024/05/05 11:25:38.844 +08:00] [INFO] [main.go:199] ["0 delete success: 400"]
[2024/05/05 11:25:38.907 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:38.909 +08:00] [INFO] [main.go:178] ["0 insert success: 800"]
[2024/05/05 11:25:38.915 +08:00] [INFO] [main.go:199] ["0 delete success: 400"]
[2024/05/05 11:25:38.929 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.033 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:39.038 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:39.110 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:39.132 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.134 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
table ddl_manager.finish_mark not exists for 70-th check, retry later
[2024/05/05 11:25:39.314 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.325 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.332 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"]
[2024/05/05 11:25:39.332 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.333 +08:00] [INFO] [main.go:178] ["1 insert success: 1200"]
[2024/05/05 11:25:39.344 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.403 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.420 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.439 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.441 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.529 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
check diff successfully
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:25:39.615 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:39.648 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.699 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:39.729 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:39.743 +08:00] [INFO] [main.go:178] ["0 insert success: 900"]
[2024/05/05 11:25:39.801 +08:00] [INFO] [main.go:178] ["0 insert success: 900"]
[2024/05/05 11:25:39.810 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.815 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:39.819 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:39.929 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:39.936 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:40.004 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"]
[2024/05/05 11:25:40.010 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.014 +08:00] [INFO] [main.go:178] ["1 insert success: 1300"]
[2024/05/05 11:25:40.023 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.024 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:40.031 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
table partition_table.finish_mark not exists for 4-th check, retry later
[2024/05/05 11:25:40.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.127 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.199 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.317 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.328 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:40.335 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:40.414 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.509 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.518 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:40.518 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.551 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.604 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.644 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"]
[2024/05/05 11:25:40.646 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"]
[2024/05/05 11:25:40.647 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.649 +08:00] [INFO] [main.go:199] ["0 delete success: 500"]
[2024/05/05 11:25:40.716 +08:00] [INFO] [main.go:178] ["0 insert success: 1000"]
[2024/05/05 11:25:40.720 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:40.722 +08:00] [INFO] [main.go:199] ["0 delete success: 500"]
[2024/05/05 11:25:40.725 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.728 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.728 +08:00] [INFO] [main.go:178] ["1 insert success: 1400"]
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:40 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
	{"id":"380e255b-4dc8-427a-a472-5b88486e5e84","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879538}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b0ed
	380e255b-4dc8-427a-a472-5b88486e5e84

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
	{"id":"380e255b-4dc8-427a-a472-5b88486e5e84","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879538}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b0ed
	380e255b-4dc8-427a-a472-5b88486e5e84

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
	{"id":"380e255b-4dc8-427a-a472-5b88486e5e84","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879538}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b0ed
	380e255b-4dc8-427a-a472-5b88486e5e84

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5861.out cli changefeed create --start-ts=449545380868128769 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error
[2024/05/05 11:25:40.820 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.823 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:40.829 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:40.946 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:41.004 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.018 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:41.041 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
table ddl_manager.finish_mark not exists for 71-th check, retry later
Create changefeed successfully!
ID: changefeed-error
Info: {"upstream_id":7365351471431534517,"namespace":"default","id":"changefeed-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:25:41.25562575+08:00","start_ts":449545380868128769,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545380868128769,"checkpoint_ts":449545380868128769,"checkpoint_time":"2024-05-05 11:25:35.172"}
PASS
[2024/05/05 11:25:41.150 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:41.202 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:41.205 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.231 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:41.243 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
check diff failed 1-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[2024/05/05 11:25:41.328 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:41.333 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"]
[2024/05/05 11:25:41.346 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:41.350 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.402 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.405 +08:00] [INFO] [main.go:178] ["1 insert success: 1500"]
[2024/05/05 11:25:41.448 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.449 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.542 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"]
[2024/05/05 11:25:41.550 +08:00] [INFO] [main.go:178] ["0 insert success: 1100"]
[2024/05/05 11:25:41.624 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:41.629 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:41.653 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:41.808 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.825 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:41.831 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:41.839 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:41.861 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:41.914 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:41.915 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"]
[2024/05/05 11:25:41.930 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:41.942 +08:00] [INFO] [main.go:178] ["1 insert success: 1600"]
[2024/05/05 11:25:42.004 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:42.006 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
table partition_table.finish_mark not exists for 5-th check, retry later
[2024/05/05 11:25:42.104 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:42.106 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:42.172 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"]
[2024/05/05 11:25:42.173 +08:00] [INFO] [main.go:178] ["0 insert success: 1200"]
[2024/05/05 11:25:42.176 +08:00] [INFO] [main.go:199] ["0 delete success: 600"]
[2024/05/05 11:25:42.177 +08:00] [INFO] [main.go:199] ["0 delete success: 600"]
[2024/05/05 11:25:42.239 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:42.256 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:42.284 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:42.316 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"]
[2024/05/05 11:25:42.340 +08:00] [INFO] [main.go:178] ["1 insert success: 1700"]
[2024/05/05 11:25:42.369 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:42.445 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:42.449 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:42.511 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsc86275e3_6e10_421c_b14c_a2047e2a4300"]
[2024/05/05 11:25:42.566 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:42.588 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:42.631 +08:00] [INFO] [main.go:178] ["0 insert success: 1300"]
[2024/05/05 11:25:42.632 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"]
[2024/05/05 11:25:42.674 +08:00] [INFO] [main.go:178] ["1 insert success: 1800"]
[2024/05/05 11:25:42.677 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs0406d42d_08cc_48d8_b3a8_bc8867b923ff"]
[2024/05/05 11:25:42.784 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"]
[2024/05/05 11:25:42.829 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs01932726_0315_4a5d_9b35_4c863fcd7dc9"]
[2024/05/05 11:25:42.832 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:42.840 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
+ set +x
[Sun May  5 11:25:42 CST 2024] <<<<<< START kafka consumer in changefeed_error case >>>>>>
check_changefeed_state http://127.0.0.1:2379 changefeed-error failed [CDC:ErrStartTsBeforeGC]
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-error
+ expected_state=failed
+ error_msg='[CDC:ErrStartTsBeforeGC]'
+ tls_dir='[CDC:ErrStartTsBeforeGC]'
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:25:41.33747625+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrStartTsBeforeGC",
    "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at 449545380868128769"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:25:41.33747625+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrStartTsBeforeGC",
    "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at 449545380868128769"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:25:41.33747625+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrStartTsBeforeGC",
    "message": "[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at 449545380868128769"
  }
}
++ jq -r .state
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449545380868128769, '"checkpoint_time":' '"2024-05-05' '11:25:35.172",' '"error":' '{' '"time":' '"2024-05-05T11:25:41.33747625+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at '449545380868128769"' '}' '}'
+ state=failed
+ [[ ! failed == \f\a\i\l\e\d ]]
++ jq -r .error.message
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"failed",' '"checkpoint_tso":' 449545380868128769, '"checkpoint_time":' '"2024-05-05' '11:25:35.172",' '"error":' '{' '"time":' '"2024-05-05T11:25:41.33747625+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrStartTsBeforeGC",' '"message":' '"[CDC:ErrStartTsBeforeGC]fail' to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at '449545380868128769"' '}' '}'
+ message='[CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at 449545380868128769'
+ [[ ! [CDC:ErrStartTsBeforeGC]fail to create or maintain changefeed because start-ts 449545380868128469 is earlier than or equal to GC safepoint at 449545380868128769 =~ \[CDC:ErrStartTsBeforeGC] ]]
run task successfully
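check_changefeed_state above verifies the injected failure: it queries the changefeed with the cdc CLI, extracts .state and .error.message with jq, and matches them against the expected 'failed' state and the [CDC:ErrStartTsBeforeGC] message. A minimal sketch of the same check, reusing the commands visible in the trace:

    info=$(cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    [ "$state" = "failed" ] || exit 1
    echo "$message" | grep -q 'CDC:ErrStartTsBeforeGC' || exit 1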
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.5970.out cli changefeed resume -c changefeed-error
[2024/05/05 11:25:42.998 +08:00] [INFO] [main.go:178] ["1 insert success: 1900"]
[2024/05/05 11:25:43.010 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:43.036 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:43.042 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs32e883d5_bd50_48b7_8d08_c369bbe9a9a1"]
[2024/05/05 11:25:43.076 +08:00] [INFO] [main.go:178] ["0 insert success: 1400"]
[2024/05/05 11:25:43.081 +08:00] [INFO] [main.go:199] ["0 delete success: 700"]
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
+ echo dump
+ nc localhost 2181
+ grep brokers
+ awk '{$1=$1;print}'
+ grep -F -w /brokers/ids/1
/brokers/ids/1
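The trace above (its two final commands were interleaved in the raw console output) is a single pipeline asking the local ZooKeeper for its registered Kafka brokers; written out sequentially it is roughly:

    echo dump | nc localhost 2181 | grep brokers | awk '{$1=$1;print}' | grep -F -w /brokers/ids/1

The final /brokers/ids/1 line confirms that broker 1 is registered in ZooKeeper.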
[2024/05/05 11:25:43.171 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:43.178 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:43.182 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:43.183 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsfa83eeeb_854f_4920_a1a8_5c057c91a6e3"]
[2024/05/05 11:25:43.185 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:43.229 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsef2b04c7_c9aa_4a6b_9816_c10193d8ba4a"]
[2024/05/05 11:25:43.328 +08:00] [INFO] [main.go:178] ["1 insert success: 2000"]
[2024/05/05 11:25:43.338 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:43.358 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b24b0480011	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:7619, start at 2024-05-05 11:25:40.260581894 +0800 CST m=+5.216284604	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:40.269 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:40.242 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:40.242 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b24b0480011	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:7619, start at 2024-05-05 11:25:40.260581894 +0800 CST m=+5.216284604	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:40.269 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:40.242 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:40.242 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b24b1000015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:7699, start at 2024-05-05 11:25:40.327300785 +0800 CST m=+5.221727873	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:40.334 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:40.338 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:40.338 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
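The VARIABLE_NAME/VARIABLE_VALUE blocks above are the bootstrap and GC settings stored in TiDB's mysql.tidb system table, dumped for the upstream and downstream TiDB instances to confirm each one is bootstrapped and GC is configured. They can be reproduced with a query along these lines (host and port as used elsewhere in this run; the exact helper the script calls is not shown):

    mysql -h 127.0.0.1 -P 4000 -u root -e \
        'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'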
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_capture/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_capture/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_capture/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[2024/05/05 11:25:43.371 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:43.468 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:43.549 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:43.568 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:43.568 +08:00] [INFO] [main.go:178] ["0 insert success: 1500"]
[2024/05/05 11:25:43.605 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:43.608 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:43.611 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:43.613 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
table ddl_manager.finish_mark not exists for 72-th check, retry later
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
[2024/05/05 11:25:43.641 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:43.646 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:43.694 +08:00] [INFO] [main.go:178] ["1 insert success: 2100"]
[2024/05/05 11:25:43.721 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:43.729 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:43.744 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs25bd7591_1128_46e3_8313_3c860ac66143"]
[2024/05/05 11:25:43.770 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:43.843 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
check diff failed 2-th time, retry later
table partition_table.finish_mark not exists for 6-th check, retry later
[2024/05/05 11:25:43.928 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs244e75fb_f666_4725_80c5_34b28483abea"]
[2024/05/05 11:25:43.943 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:44.039 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:44.048 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:44.049 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:44.050 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:44.054 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:44.118 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:44.124 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:44.166 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:44.209 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:44.230 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:44.240 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:44.253 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:44.330 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:44.365 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:44.366 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:44.411 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:44.509 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:44.519 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:44.520 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:44.525 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:44.526 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:44.547 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:44.559 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:44.619 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:44.635 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:44.650 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:44.664 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:44.667 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:44.744 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:44.821 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:44.829 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:44.831 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:44.961 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:45.003 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:45.022 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:45.028 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:45.032 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.040 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:45.067 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:45.126 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
+ set +x
table changefeed_error.usertable not exists for 1-th check, retry later
[2024/05/05 11:25:45.150 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:45.151 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.167 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:45.199 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:45.242 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:45.254 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:45.266 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:45.266 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:45.433 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:45.436 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:45.442 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.446 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:45.455 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:45.507 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:45.541 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:45.562 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:45.619 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:45.619 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:45.637 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:45.642 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
table ddl_manager.finish_mark not exists for 73-th check, retry later
check diff failed 3-th time, retry later
table partition_table.finish_mark not exists for 7-th check, retry later
[2024/05/05 11:25:45.703 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.707 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:45.724 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:45.728 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:45.830 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:45.833 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:45.841 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:45.848 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.858 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:45.913 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9043.out cli tso query --pd=http://127.0.0.1:2379
[2024/05/05 11:25:45.956 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:45.962 +08:00] [INFO] [main.go:88] ["testGetDefaultValue take 14.154637919s"]
[2024/05/05 11:25:46.018 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:46.041 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:46.050 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:46.104 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:46.117 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:46.148 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:46.156 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_basic_avro
Starting Upstream PD...
[2024/05/05 11:25:46.203 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:46.209 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:46.269 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:46.269 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:46.303 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:46.306 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:46.326 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:46.339 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:46.402 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:46.434 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:46.454 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:46.457 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:46.531 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:46.531 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:46.562 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:46.621 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:46.630 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:46.639 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
[2024/05/05 11:25:46.739 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:46.740 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:46.744 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:46.749 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:46.805 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:46.825 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:46.854 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:46.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
table changefeed_error.usertable exists
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[2024/05/05 11:25:46.952 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:46.952 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:47.035 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:47.036 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:47.110 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:47.133 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:47.134 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:47.150 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
+ set +x
+ tso='449545383625883650
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545383625883650 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
check diff failed 1-th time, retry later
[2024/05/05 11:25:47.232 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:47.238 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:47.245 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:47.245 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:47.256 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:47.315 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:47.340 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:47.366 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:47.426 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:47.428 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
table ddl_manager.finish_mark not exists for 74-th check, retry later
[2024/05/05 11:25:47.465 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:47.466 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:47.533 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:47.549 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:47.552 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:47.561 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:47.648 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:47.648 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:47.660 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:47.661 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
check diff failed 4-th time, retry later
[2024/05/05 11:25:47.699 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:47.729 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:47.753 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:47.812 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:47.842 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:47.849 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:47.921 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:47.922 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:47.956 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:48.004 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:48.004 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:48.018 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:48.062 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:48.116 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:48.126 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:48.145 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:48.160 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:48.161 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
***************** properties *****************
"readproportion"="0"
"mysql.db"="multi_capture_1"
"dotransactions"="false"
"mysql.user"="root"
"workload"="core"
"requestdistribution"="uniform"
"readallfields"="true"
"operationcount"="0"
"scanproportion"="0"
"insertproportion"="0"
"mysql.port"="4000"
"updateproportion"="0"
"threadcount"="2"
"recordcount"="10"
"mysql.host"="127.0.0.1"
**********************************************
Run finished, takes 9.480464ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 1962.4, Avg(us): 1817, Min(us): 1012, Max(us): 4278, 95th(us): 5000, 99th(us): 5000
table partition_table.finish_mark exists
check diff successfully
[2024/05/05 11:25:48.230 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:48.236 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:48.265 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:48.324 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:48.348 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:48.356 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:48.425 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:48.427 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:48.435 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:48.436 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
wait process cdc.test exit for 1-th time...
[2024/05/05 11:25:48.470 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:48.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:48.546 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:48.599 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:48.615 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:48.620 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:48.648 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:48.651 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:48.710 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:48.740 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:48.757 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:48.769 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:48.907 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:48.907 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:48.926 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:48.931 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:49.019 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:49.043 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:49.053 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:49.138 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:49.146 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:49.159 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:49.213 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
wait process cdc.test exit for 2-th time...
[2024/05/05 11:25:49.232 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:49.261 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:49.262 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:49.304 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:49.321 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:49.353 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:49.365 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:49.409 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:49.424 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:49.459 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
table ddl_manager.finish_mark not exists for 75-th check, retry later
***************** properties *****************
"mysql.db"="multi_capture_2"
"updateproportion"="0"
"mysql.port"="4000"
"operationcount"="0"
"readallfields"="true"
"dotransactions"="false"
"mysql.host"="127.0.0.1"
"mysql.user"="root"
"threadcount"="2"
"workload"="core"
"insertproportion"="0"
"recordcount"="10"
"readproportion"="0"
"scanproportion"="0"
"requestdistribution"="uniform"
**********************************************
Run finished, takes 8.630483ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 2040.9, Avg(us): 1646, Min(us): 880, Max(us): 3584, 95th(us): 4000, 99th(us): 4000
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[2024/05/05 11:25:49.514 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:49.522 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:49.600 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:49.614 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:49.646 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:49.648 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:49.725 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:49.735 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
wait process cdc.test exit for 3-th time...
check diff successfully
***************** properties *****************
"recordcount"="20"
"readproportion"="0"
"mysql.host"="127.0.0.1"
"readallfields"="true"
"workload"="core"
"requestdistribution"="uniform"
"mysql.user"="root"
"updateproportion"="0"
"scanproportion"="0"
"mysql.db"="changefeed_error"
"operationcount"="0"
"dotransactions"="false"
"mysql.port"="4000"
"insertproportion"="0"
"threadcount"="4"
**********************************************
Run finished, takes 5.016666ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 5395.5, Avg(us): 848, Min(us): 448, Max(us): 1781, 95th(us): 2000, 99th(us): 2000
check diff successfully
{"id":"380e255b-4dc8-427a-a472-5b88486e5e84","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879538}
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/capture
+ message=capture
++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only
+ info=/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ [[ /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84 =~ capture ]]
+ echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84'
capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ echo 'check failed'
check failed
+ exit 1
run task failed 1-th time, retry later
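The check_etcd_meta_not_exist trace above fails because a capture key is still registered under the /tidb/cdc/default/__cdc_meta__/capture prefix, so the caller retries. The sketch below is an approximation reconstructed only from the commands visible in that trace (etcdctl prefix query plus a regex match on the message); the real helper in the test utilities may differ in its exact wording and exit handling.

# approximation of the check traced above: pass only when no key under the
# prefix matches the given message (here, a lingering capture registration)
check_etcd_meta_not_exist() {
    local key_prefix=$1
    local message=$2
    local info
    info=$(etcdctl get "$key_prefix" --prefix --keys-only)
    if [[ $info =~ $message ]]; then
        echo "$message contains in etcd $info"
        echo 'check failed'
        return 1
    fi
    echo 'check pass'
}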
[2024/05/05 11:25:49.749 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:49.755 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:49.811 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:49.833 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:49.843 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:49.868 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:49.915 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:49.947 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:49.949 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:49.963 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:25:50 CST 2024] <<<<<< run test case partition_table success! >>>>>>
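The interleaved "wait process cdc.test exit for N-th time..." lines above come from a poll that repeats until no cdc.test process remains, after which the test case is declared finished. A rough sketch of such a loop is below, assuming pgrep is available; the suite's own helper may instead rely on killall (which would also explain the "cdc.test: no process found" line), so treat this only as an illustration of the polling behavior.

# rough sketch: poll until no cdc.test process remains, as the messages above suggest
i=0
while pgrep -x cdc.test >/dev/null 2>&1; do
    i=$((i + 1))
    echo "wait process cdc.test exit for ${i}-th time..."
    sleep 1
done
echo "process cdc.test already exit"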
[2024/05/05 11:25:50.031 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:50.047 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.118 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:50.128 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:50.156 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.219 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:50.220 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
check diff failed 5-th time, retry later
[2024/05/05 11:25:50.251 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:50.303 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:50.308 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:50.330 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:50.367 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:25:50.422 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.432 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:50.443 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:50.457 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
***************** properties *****************
"recordcount"="10"
"workload"="core"
"requestdistribution"="uniform"
"updateproportion"="0"
"threadcount"="2"
"readproportion"="0"
"insertproportion"="0"
"scanproportion"="0"
"readallfields"="true"
"mysql.host"="127.0.0.1"
"dotransactions"="false"
"mysql.db"="multi_capture_3"
"operationcount"="0"
"mysql.user"="root"
"mysql.port"="4000"
**********************************************
Run finished, takes 8.378322ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 2191.2, Avg(us): 1605, Min(us): 931, Max(us): 3694, 95th(us): 4000, 99th(us): 4000
[2024/05/05 11:25:50.514 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:50.527 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:50.573 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:50.621 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:50.650 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:50.652 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.668 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:25:50.741 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:25:50.806 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:50.827 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.871 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:50.912 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:51.037 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:25:51.203 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
[2024/05/05 11:25:51.251 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
table ddl_manager.finish_mark not exists for 76-th check, retry later
***************** properties *****************
"dotransactions"="false"
"scanproportion"="0"
"requestdistribution"="uniform"
"insertproportion"="0"
"mysql.user"="root"
"operationcount"="0"
"readallfields"="true"
"mysql.db"="multi_capture_4"
"workload"="core"
"updateproportion"="0"
"recordcount"="10"
"mysql.host"="127.0.0.1"
"readproportion"="0"
"threadcount"="2"
"mysql.port"="4000"
**********************************************
Run finished, takes 9.432926ms
INSERT - Takes(s): 0.0, Count: 10, OPS: 1949.4, Avg(us): 1784, Min(us): 1052, Max(us): 4196, 95th(us): 5000, 99th(us): 5000
[Sun May  5 11:25:51 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.91799181.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data1 --cluster-id default --addr 127.0.0.1:8301
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8301; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
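The trace above launches a cdc server on 127.0.0.1:8301 and then polls its /debug/info endpoint until the response contains "etcd info" (and not the "failed to get info:" marker), sleeping 3 seconds between attempts with a budget of 50 retries. The condensed loop below restates that readiness poll using only the commands, URL, credentials, and limits visible in the trace; the actual test script may structure it differently.

# condensed form of the readiness poll traced above
for ((i = 0; i <= 50; i++)); do
    # curl may fail while the server is still starting; an empty response simply retries
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'etcd info' && ! echo "$res" | grep -q 'failed to get info:'; then
        break
    fi
    if ((i == 50)); then
        echo 'cdc server at 127.0.0.1:8301 did not become ready'
        exit 1
    fi
    sleep 3
done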
[2024/05/05 11:25:51.343 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd64fb0bd_7bba_4938_af74_365ef5334cee"]
[2024/05/05 11:25:51.431 +08:00] [INFO] [main.go:178] ["72 insert success: 1600"]
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/capture
+ message=capture
++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only
+ info=/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ [[ /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84 =~ capture ]]
+ echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84'
capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ echo 'check failed'
check failed
+ exit 1
run task failed 2-th time, retry later
[2024/05/05 11:25:51.569 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:51.596 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa41da281_f9ae_41f0_a959_8a34752e7e35"]
[2024/05/05 11:25:51.609 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:51.639 +08:00] [INFO] [main.go:178] ["73 insert success: 1700"]
[2024/05/05 11:25:51.671 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:51.675 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:51.687 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs0fe5f921_96fc_4efa_92dd_f287cd367f58"]
[2024/05/05 11:25:51.742 +08:00] [INFO] [main.go:178] ["72 insert success: 1700"]
[2024/05/05 11:25:51.824 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb8280e3e_d434_49d7_8137_27a31dbd104c"]
[2024/05/05 11:25:51.853 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsbef6667c_8ab0_4f17_80f5_e0708eb15543"]
[2024/05/05 11:25:51.874 +08:00] [INFO] [main.go:178] ["73 insert success: 1800"]
[2024/05/05 11:25:51.925 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:51.932 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:51.936 +08:00] [INFO] [main.go:178] ["72 insert success: 1800"]
[2024/05/05 11:25:51.985 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsd03df252_bfc1_4451_bc47_f940b48bef0f"]
[2024/05/05 11:25:51.987 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.013 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
check diff successfully
[2024/05/05 11:25:52.027 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:52.137 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:52.202 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:52.206 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:52.241 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:52.244 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:52.328 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.344 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:52.412 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:52.416 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:52.425 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:52.431 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:52.437 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.538 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:52.608 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:52.609 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.645 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.646 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:52.729 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:52.738 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:52.778 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:52.782 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:52.814 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:52.817 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:52.822 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:52.906 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs02b29ec8_48ce_467d_80d5_4ccd97bcdab7"]
[2024/05/05 11:25:52.935 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
table ddl_manager.finish_mark not exists for 77-th check, retry later
[2024/05/05 11:25:53.110 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:53.136 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:53.211 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:53.232 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs81871313_84c6_4792_97c0_4c591d2a5037"]
[2024/05/05 11:25:53.234 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:25:53.409 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:53.438 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:53.450 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:53.450 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:53.459 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:53.511 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:53.515 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:53.533 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:53.544 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:53.562 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:53.641 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
check diff failed 1-th time, retry later
[2024/05/05 11:25:53.812 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:53.812 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:53.817 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:25:53.830 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:25:53.830 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:54.018 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:54.124 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:54.126 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:54.135 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:54.141 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:54.237 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:54.239 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:54.245 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:54.328 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:54.332 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:54.348 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8301
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:54 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:25:54 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92359237.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data2 --cluster-id default --addr 127.0.0.1:8302
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8302; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:25:54.505 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:25:54.519 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:54.533 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:54.540 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:25:54.541 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:54.632 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:54.741 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:54.806 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:54.810 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:54.831 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:54.929 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:54.936 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:55.014 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:55.037 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:55.038 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:55.047 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
table ddl_manager.finish_mark not exists for 78-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:25:55.216 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:25:55.232 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:55.254 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:25:55.299 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:55.308 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:55.323 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:55.357 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:55.362 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:55.367 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:55.419 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:55.435 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:55.501 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:55.599 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:55.703 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/capture
+ message=capture
++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only
+ info=/tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ [[ /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84 =~ capture ]]
+ echo 'capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84'
capture contains in etcd /tidb/cdc/default/__cdc_meta__/capture/380e255b-4dc8-427a-a472-5b88486e5e84
+ echo 'check failed'
check failed
+ exit 1
run task failed 3-th time, retry later
check diff failed 2-th time, retry later
[2024/05/05 11:25:55.709 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:55.712 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:55.840 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:25:55.903 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:55.950 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:25:56.002 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:56.009 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:56.013 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:56.042 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:56.043 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:56.047 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:56.099 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:56.101 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:56.112 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:56.141 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:56.337 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:56.340 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:56.412 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[2024/05/05 11:25:56.530 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:25:56.603 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:56.726 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:25:56.729 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:56.739 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:56.810 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:56.820 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:56.820 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:56.823 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:56.823 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:56.835 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:56.844 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:56.906 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:57.027 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:57.041 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:57.045 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:57.226 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:25:57.319 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:57.512 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:57.515 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:25:57.618 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:57.629 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:57.631 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:25:57.632 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:57.632 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:57.636 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:57.653 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:57.711 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:57.712 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
table ddl_manager.finish_mark not exists for 79-th check, retry later
check diff failed 3-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8302
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:25:57 GMT
< Content-Length: 1271
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:25:57 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.92919293.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8303 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8303; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:25:57.759 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:57.809 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:57.810 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:57.845 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:25:57.865 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[Pipeline] // timeout
[Pipeline] // timeout
[2024/05/05 11:25:58.002 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:58.027 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:25:58.227 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:58.234 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:58.235 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:58.241 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:58.241 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[2024/05/05 11:25:58.301 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:58.407 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:58.415 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:58.423 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:58.504 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:58.534 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:58.544 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:25:58.558 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:25:58.603 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:58.637 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:58.720 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[2024/05/05 11:25:58.833 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:25:58.838 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:58.841 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:58.908 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:58.917 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:58.918 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[2024/05/05 11:25:59.039 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:59.105 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:25:59.137 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:59.214 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:59.228 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:59.232 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:59.233 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[Pipeline] // container
[Pipeline] sh
[2024/05/05 11:25:59.310 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:59.350 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:59.430 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:25:59.440 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:59.516 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[Pipeline] sh
table ddl_manager.finish_mark not exists for 80-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b25af2c0012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:8686, start at 2024-05-05 11:25:56.583781801 +0800 CST m=+5.424761909	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:56.591 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:56.555 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:56.555 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b25af2c0012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:8686, start at 2024-05-05 11:25:56.583781801 +0800 CST m=+5.424761909	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:56.591 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:56.555 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:56.555 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b25b0b0000f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:8766, start at 2024-05-05 11:25:56.667458064 +0800 CST m=+5.454431781	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:27:56.674 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:25:56.652 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:15:56.652 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
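The VARIABLE_NAME / VARIABLE_VALUE / COMMENT blocks above are dumps of TiDB's mysql.tidb bookkeeping table (bootstrap flags, timezone, GC leader and safe point), printed while the upstream and downstream TiDB instances are verified. A minimal way to reproduce such a dump by hand against the instances in this run is sketched below; the host, port, and user are taken from the output above, and the mysql client is assumed to be the same one that produced the ERROR 2003 lines earlier.

# dump the same bookkeeping table by hand; 4000 is the TiDB port used in this run
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'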
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_basic_avro/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[2024/05/05 11:25:59.525 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:59.534 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:25:59.726 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:25:59.745 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G17
Run cases: clustered_index processor_resolved_ts_fallback
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=90bba969-97f4-4523-82a7-8119aa0a40e8
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G17
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-ckb5f
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s pingcap_tiflow_pull_cdc_integration_kafka_test_1855-ckb5f
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-ckb5f-rwq9s
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/clustered_index/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:25:59 CST 2024] <<<<<< skip test case clustered_index for kafka! >>>>>>
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_resolved_ts_fallback/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:25:59 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! >>>>>>
[Pipeline] sh
[2024/05/05 11:25:59.802 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:59.848 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:25:59.852 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:59.854 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:25:59.868 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:25:59.945 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:25:59.969 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G16
Run cases: owner_resign processor_etcd_worker_delay sink_hang
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=cc43034e-f228-410c-880c-8b1e3b6d5576
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G16
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wv6pc
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp pingcap_tiflow_pull_cdc_integration_kafka_test_1855-wv6pc
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wv6pc-4pcjp
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/owner_resign/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:26:00 CST 2024] <<<<<< run test case owner_resign success! >>>>>>
[2024/05/05 11:26:00.045 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:00.046 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:00.050 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
check diff failed for the 4th time, retrying later
[Pipeline] sh
[2024/05/05 11:26:00.328 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:00.340 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:00.420 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:00.460 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:00.469 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:00.510 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G15
Run cases: new_ci_collation batch_add_table multi_rocks
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=5fd6357b-57fa-4d16-87e6-1410ace96219
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G15
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-jb9b1
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj pingcap_tiflow_pull_cdc_integration_kafka_test_1855-jb9b1
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/new_ci_collation/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
[Pipeline] sh
[2024/05/05 11:26:00.563 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:00.565 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:00.628 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:00.638 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:00.741 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G12
Run cases: many_pk_or_uk capture_session_done_during_task ddl_attributes
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=3269f172-a560-4766-8eb8-2ba57dc9ef8c
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G12
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-klk36
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p pingcap_tiflow_pull_cdc_integration_kafka_test_1855-klk36
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/many_pk_or_uk/run.sh using Sink-Type: kafka... <<=================
[Pipeline] sh
[2024/05/05 11:26:00.852 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:00.904 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:00.924 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:00.970 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:00.975 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:01.034 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:01.052 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8303 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8303
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:00 GMT
< Content-Length: 1750
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/capture/ff9c2baf-5fe3-4f99-acaa-af83015429d6
	{"id":"ff9c2baf-5fe3-4f99-acaa-af83015429d6","address":"127.0.0.1:8303","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879557}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffeaa5
	ff9c2baf-5fe3-4f99-acaa-af83015429d6

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/capture/ff9c2baf-5fe3-4f99-acaa-af83015429d6
	{"id":"ff9c2baf-5fe3-4f99-acaa-af83015429d6","address":"127.0.0.1:8303","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879557}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffeaa5
	ff9c2baf-5fe3-4f99-acaa-af83015429d6

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/378c8304-c1b3-40fa-935c-00e7be26b931
	{"id":"378c8304-c1b3-40fa-935c-00e7be26b931","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879551}

/tidb/cdc/default/__cdc_meta__/capture/c859ae66-2848-4e68-96a8-060efde70498
	{"id":"c859ae66-2848-4e68-96a8-060efde70498","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879554}

/tidb/cdc/default/__cdc_meta__/capture/ff9c2baf-5fe3-4f99-acaa-af83015429d6
	{"id":"ff9c2baf-5fe3-4f99-acaa-af83015429d6","address":"127.0.0.1:8303","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879557}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea4d
	378c8304-c1b3-40fa-935c-00e7be26b931

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffea87
	c859ae66-2848-4e68-96a8-060efde70498

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8ffeaa5
	ff9c2baf-5fe3-4f99-acaa-af83015429d6

/tidb/cdc/default/default/upstream/7365351505060474996
	{"id":7365351505060474996,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
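The trace above is the usual readiness poll: curl the capture's /debug/info endpoint with basic auth, retry up to 50 times, and stop once the response body contains 'etcd info'. A hedged reconstruction of that loop as a helper (the function name is an assumption, not the harness's own utility):

# Sketch of the readiness poll seen in the trace above (assumed helper name).
wait_for_cdc_server() {
  local url=$1                       # e.g. http://127.0.0.1:8303/debug/info
  local i res
  for ((i = 0; i <= 50; i++)); do
    # Same curl invocation as in the trace: basic auth, 20s timeout.
    res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret || true)
    if echo "$res" | grep -q 'failed to get info:'; then
      echo "server reported an error, retrying"
    elif echo "$res" | grep -q 'etcd info'; then
      return 0                       # server is serving capture/owner metadata
    fi
    sleep 3
  done
  echo "cdc server did not become ready" >&2
  return 1
}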
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.9346.out cli changefeed create --start-ts=449545383625883650 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-capture-test-8054?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8301
Create changefeed successfully!
ID: 1efd7a07-69fc-4286-8320-2596e78aefd0
Info: {"upstream_id":7365351505060474996,"namespace":"default","id":"1efd7a07-69fc-4286-8320-2596e78aefd0","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-capture-test-8054?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:01.059407537+08:00","start_ts":449545383625883650,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545383625883650,"checkpoint_ts":449545383625883650,"checkpoint_time":"2024-05-05 11:25:45.692"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
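The changefeed above is created through the coverage-instrumented cdc.test binary; stripped of the coverage wrapper, the equivalent plain CLI call would look like the sketch below, reusing the sink URI, start-ts, and server address from this particular run (treat them as placeholders elsewhere).

# Equivalent plain-CLI form of the changefeed creation above.
SINK_URI="kafka://127.0.0.1:9092/ticdc-multi-capture-test-8054?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
cdc cli changefeed create \
  --start-ts=449545383625883650 \
  --sink-uri="$SINK_URI" \
  --server=127.0.0.1:8301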
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G13
Run cases: tiflash region_merge common_1
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=1730ac3c-f96b-4b9c-9eea-6f44dc50a1e2
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G13
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-nbv84
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-nbv84 pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/tiflash/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
[2024/05/05 11:26:01.104 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1ebc9eca_a3fb_4f99_9ed0_30ef7dab764a"]
[2024/05/05 11:26:01.124 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:01.152 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs6ae7ffb3_de28_422a_8012_5fcc751854ca"]
[2024/05/05 11:26:01.308 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
+ rm -rf /tmp/tidb_cdc_test
+ mkdir -p /tmp/tidb_cdc_test
+ chmod +x ./tests/integration_tests/run_group.sh
+ ./tests/integration_tests/run_group.sh kafka G14
Run cases: changefeed_finish force_replicate_table
PROW_JOB_ID=d886c6eb-17f2-4f64-86e7-91214509cf93
JENKINS_NODE_COOKIE=7d26aa71-eeeb-484a-980c-e6f35719312b
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/
GOLANG_VERSION=1.21.0
HOSTNAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT=tcp://10.233.0.1:443
KUBERNETES_PORT_443_TCP_PORT=443
TERM=xterm
STAGE_NAME=Test
BUILD_TAG=jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=03312178c534dce949face80c69812d989e55009
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/pull_cdc_integration_kafka_test","buildid":"1786957579159605252","prowjobid":"d886c6eb-17f2-4f64-86e7-91214509cf93","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"master","base_sha":"be1553484fe4c03594eabb8d7435c694e5fd7224","base_link":"https://github.com/pingcap/tiflow/commit/be1553484fe4c03594eabb8d7435c694e5fd7224","pulls":[{"number":10919,"author":"lidezhu","sha":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","title":"*(ticdc): split old update kv entry after restarting changefeed","link":"https://github.com/pingcap/tiflow/pull/10919","commit_link":"https://github.com/pingcap/tiflow/pull/10919/commits/0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","author_link":"https://github.com/lidezhu"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=03312178c534dce949face80c69812d989e55009
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../bin:/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/../../scripts/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow
HUDSON_URL=https://do.pingcap.net/jenkins/
TICDC_COVERALLS_TOKEN=****
JOB_NAME=pingcap/tiflow/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#1855
TEST_GROUP=G14
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1786957579159605252
TICDC_CODECOV_TOKEN=****
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=03312178c534dce949face80c69812d989e55009
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/1855/display/redirect?page=tests
SHLVL=5
HOME=/home/jenkins
POD_LABEL=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-rdz18
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-8f1d433d6527e85b4c28b432b07fa0c56dd3090c9176381d29a5b4531676247a
NODE_LABELS=pingcap_tiflow_pull_cdc_integration_kafka_test_1855-rdz18 pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=1855
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
_=/usr/bin/env
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_finish/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
table ddl_manager.finish_mark does not exist at the 81st check, retrying later
[2024/05/05 11:26:01.338 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:01.440 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:01.442 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs114d94a3_aa00_4ca1_899d_2695c8a275b1"]
[2024/05/05 11:26:01.552 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[Sun May  5 11:26:01 CST 2024] <<<<<< START cdc server in kafka_simple_basic_avro case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.1015310155.out server --log-file /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_basic_avro/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:26:01.586 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:01.630 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs42c37d16_3493_48af_ac46_1e4ce39fa688"]
[2024/05/05 11:26:01.631 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:01.631 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:01.637 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:01.682 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:01.718 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:01.759 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:01.766 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:01.771 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
<<< Run all test success >>>
check diff failed for the 5th time, retrying later
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/capture' 'capture'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/capture
+ message=capture
++ etcdctl get /tidb/cdc/default/__cdc_meta__/capture --prefix --keys-only
+ info=
+ [[ '' =~ capture ]]
+ echo 'check pass'
check pass
+ exit 0
run task successfully
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/owner
+ message=owner
++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only
+ info=
+ [[ '' =~ owner ]]
+ echo 'check pass'
check pass
+ exit 0
run task successfully
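The two checks above call check_etcd_meta_not_exist to confirm that no capture or owner keys remain under the CDC etcd prefix before restarting the server. A simplified reconstruction based on the traced commands (the real helper lives in tests/integration_tests/_utils and may differ):

# Sketch of check_etcd_meta_not_exist as reconstructed from the trace above.
check_etcd_meta_not_exist() {
  local key_prefix=$1   # e.g. /tidb/cdc/default/__cdc_meta__/capture
  local message=$2      # e.g. capture
  local info
  info=$(etcdctl get "$key_prefix" --prefix --keys-only)
  if [[ $info =~ $message ]]; then
    echo "check failed: $message keys still present under $key_prefix" >&2
    return 1
  fi
  echo 'check pass'
  return 0
}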
[Sun May  5 11:26:01 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.62306232.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
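The changefeed_error case starts its cdc server with GO_FAILPOINTS set so that the owner returns an injected retriable error; a condensed sketch of that step, with the failpoint value copied from the trace and the coverage-profile path shortened as a placeholder:

# Start a cdc server with a failpoint enabled for the changefeed_error case.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/NewChangefeedRetryError=return(true)'
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.out server \
  --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log \
  --log-level debug \
  --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data \
  --cluster-id default &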
[2024/05/05 11:26:02.117 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:02.151 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:02.154 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:02.159 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:02.164 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsf2ceda0f_32e9_4462_90d4_5ba1f0b32c4c"]
[2024/05/05 11:26:02.204 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:02.240 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa0e8ddad_a8ea_415c_81b3_2874b553e06f"]
[2024/05/05 11:26:02.263 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:02.275 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:02.282 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[Pipeline] // dir
[Pipeline] }
+ set +x
[Sun May  5 11:26:02 CST 2024] <<<<<< START kafka consumer in multi_capture case >>>>>>
table multi_capture_1.usertable does not exist at the 1st check, retrying later
[2024/05/05 11:26:02.449 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:02.537 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:02.540 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:02.546 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:02.623 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:02.627 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:02.628 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[2024/05/05 11:26:02.657 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:02.660 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:02.683 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:02.689 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:02.704 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[2024/05/05 11:26:02.929 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:02.934 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsf2d0258a_31c7_4ad1_b829_a19c1bb3fa41"]
[2024/05/05 11:26:03.019 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:03.020 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:03.030 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:03.032 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs4979c7db_2399_4757_91df_601f739c6bd7"]
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[2024/05/05 11:26:03.223 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:03.227 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:03.241 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:03.253 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:03.303 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:03.315 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:03.324 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:03.325 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[Pipeline] // podTemplate
table ddl_manager.finish_mark does not exist at the 82nd check, retrying later
[Pipeline] }
[2024/05/05 11:26:03.446 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:03.449 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:03.451 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:03.520 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:03.524 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:03.529 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:03.536 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:03.548 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[Pipeline] // withEnv
[Pipeline] }
start tidb cluster in /tmp/tidb_cdc_test/new_ci_collation
Starting Upstream PD...
[Pipeline] // stage
[2024/05/05 11:26:03.727 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:03.812 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:03.835 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[Pipeline] }
<<< Run all test success >>>
check diff successfully
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[2024/05/05 11:26:03.937 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:03.939 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:03.940 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:03.953 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:04.010 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:04.123 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:04.136 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:04.148 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/processor_etcd_worker_delay/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:26:03 CST 2024] <<<<<< run test case processor_etcd_worker_delay success! >>>>>>
[Pipeline] // dir
[2024/05/05 11:26:04.219 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:04.229 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:04.234 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:04.238 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:04.240 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[Pipeline] }
start tidb cluster in /tmp/tidb_cdc_test/tiflash
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[Pipeline] // withCredentials
[Pipeline] }
[2024/05/05 11:26:04.430 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:04.438 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:04.545 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:04.635 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:04.639 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:04 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03a82572-2245-495c-b931-3cf0af80cfef
	{"id":"03a82572-2245-495c-b931-3cf0af80cfef","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	3

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b1dc
	03a82572-2245-495c-b931-3cf0af80cfef

/tidb/cdc/default/default/changefeed/info/changefeed-error
	{"upstream-id":7365351471431534517,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:25:41.25562575+08:00","start-ts":449545380868128769,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-05T11:26:02.058437493+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545387906957317}

/tidb/cdc/default/default/changefeed/status/changefeed-error
	{"checkpoint-ts":449545384393965572,"min-table-barrier-ts":449545384393965572,"admin-job-type":1}

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03a82572-2245-495c-b931-3cf0af80cfef
	{"id":"03a82572-2245-495c-b931-3cf0af80cfef","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	3

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b1dc
	03a82572-2245-495c-b931-3cf0af80cfef

/tidb/cdc/default/default/changefeed/info/changefeed-error
	{"upstream-id":7365351471431534517,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:25:41.25562575+08:00","start-ts":449545380868128769,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-05T11:26:02.058437493+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545387906957317}

/tidb/cdc/default/default/changefeed/status/changefeed-error
	{"checkpoint-ts":449545384393965572,"min-table-barrier-ts":449545384393965572,"admin-job-type":1}

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03a82572-2245-495c-b931-3cf0af80cfef
	{"id":"03a82572-2245-495c-b931-3cf0af80cfef","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	3

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b1dc
	03a82572-2245-495c-b931-3cf0af80cfef

/tidb/cdc/default/default/changefeed/info/changefeed-error
	{"upstream-id":7365351471431534517,"namespace":"default","changefeed-id":"changefeed-error","sink-uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:25:41.25562575+08:00","start-ts":449545380868128769,"target-ts":0,"admin-job-type":1,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"pending","error":{"time":"2024-05-05T11:26:02.058437493+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrOwnerUnknown","message":"failpoint injected retriable error"},"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545387906957317}

/tidb/cdc/default/default/changefeed/status/changefeed-error
	{"checkpoint-ts":449545384393965572,"min-table-barrier-ts":449545384393965572,"admin-job-type":1}

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
check_changefeed_state http://127.0.0.1:2379 changefeed-error warning failpoint injected retriable error
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-error
+ expected_state=warning
+ error_msg=failpoint
+ tls_dir=error
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error -s
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "warning",
  "checkpoint_tso": 449545384393965572,
  "checkpoint_time": "2024-05-05 11:25:48.622",
  "error": {
    "time": "2024-05-05T11:26:02.058437493+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrOwnerUnknown",
    "message": "failpoint injected retriable error"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "warning",
  "checkpoint_tso": 449545384393965572,
  "checkpoint_time": "2024-05-05 11:25:48.622",
  "error": {
    "time": "2024-05-05T11:26:02.058437493+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrOwnerUnknown",
    "message": "failpoint injected retriable error"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error",
  "state": "warning",
  "checkpoint_tso": 449545384393965572,
  "checkpoint_time": "2024-05-05 11:25:48.622",
  "error": {
    "time": "2024-05-05T11:26:02.058437493+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrOwnerUnknown",
    "message": "failpoint injected retriable error"
  }
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449545384393965572, '"checkpoint_time":' '"2024-05-05' '11:25:48.622",' '"error":' '{' '"time":' '"2024-05-05T11:26:02.058437493+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \w\a\r\n\i\n\g ]]
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error",' '"state":' '"warning",' '"checkpoint_tso":' 449545384393965572, '"checkpoint_time":' '"2024-05-05' '11:25:48.622",' '"error":' '{' '"time":' '"2024-05-05T11:26:02.058437493+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrOwnerUnknown",' '"message":' '"failpoint' injected retriable 'error"' '}' '}'
++ jq -r .error.message
+ message='failpoint injected retriable error'
+ [[ ! failpoint injected retriable error =~ failpoint ]]
run task successfully
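The expansion above is the test helper check_changefeed_state at work: it queries the changefeed once with `cdc cli changefeed query`, then compares .state and .error.message from the returned JSON using jq. A minimal sketch of that helper, reconstructed from the trace (the function body and error handling are assumptions, not the verbatim script in tiflow):

check_changefeed_state() {
    # endpoints, changefeed id, expected state and an error-message pattern,
    # in the same order the trace shows them being assigned.
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "expected state $expected_state, got $state"
        exit 1
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
        echo "expected error matching '$error_msg', got '$message'"
        exit 1
    fi
    echo "run task successfully"
}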
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6331.out cli changefeed remove -c changefeed-error
[2024/05/05 11:26:04.711 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:04.738 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:04.739 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:04.830 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:04.842 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:04.845 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:04.859 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:04.903 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:04.909 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:04.918 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:04.920 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[Pipeline] // container
table multi_capture_1.usertable not exists for 2-th check, retry later
[Pipeline] }
The 1 times to try to start tidb cluster...
[Pipeline] // withEnv
[2024/05/05 11:26:04.954 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:04.966 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:05.134 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[Pipeline] }
table ddl_manager.finish_mark exists
[Pipeline] // node
[Pipeline] }
[2024/05/05 11:26:05.211 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:05.228 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:05.406 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:05.439 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:05.441 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[Pipeline] // podTemplate
[Pipeline] }
check diff successfully
[Pipeline] // withEnv
start tidb cluster in /tmp/tidb_cdc_test/changefeed_finish
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[Pipeline] }
[2024/05/05 11:26:05.532 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:05.538 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:05.558 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:05.601 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:05.608 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:05.608 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:05.625 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:05.633 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:05.638 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:05.647 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:05 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/36a49ec8-4bca-46aa-9720-911a800c4ea0
	{"id":"36a49ec8-4bca-46aa-9720-911a800c4ea0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c946a8cf
	36a49ec8-4bca-46aa-9720-911a800c4ea0

/tidb/cdc/default/default/upstream/7365351579451726366
	{"id":7365351579451726366,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/36a49ec8-4bca-46aa-9720-911a800c4ea0
	{"id":"36a49ec8-4bca-46aa-9720-911a800c4ea0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c946a8cf
	36a49ec8-4bca-46aa-9720-911a800c4ea0

/tidb/cdc/default/default/upstream/7365351579451726366
	{"id":7365351579451726366,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/36a49ec8-4bca-46aa-9720-911a800c4ea0
	{"id":"36a49ec8-4bca-46aa-9720-911a800c4ea0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879561}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c946a8cf
	36a49ec8-4bca-46aa-9720-911a800c4ea0

/tidb/cdc/default/default/upstream/7365351579451726366
	{"id":7365351579451726366,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
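The block above is the standard readiness probe for a freshly started capture: curl the /debug/info endpoint with basic auth, retry up to 50 times, and break as soon as the body contains 'etcd info'. Condensed into a standalone sketch (endpoint, credentials and retry budget copied from the trace; the surrounding helper name and exact messages are assumptions):

# Poll the capture's debug endpoint until it reports etcd metadata.
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
              --user ticdc:ticdc_secret 2>/dev/null) || true
    if echo "$res" | grep -q 'failed to get info:'; then
        : # server responded but is unhealthy; fall through and retry
    elif echo "$res" | grep -q 'etcd info'; then
        break                              # capture is up and serving etcd info
    fi
    if [[ $i -eq 50 ]]; then
        echo "cdc server did not become ready in time"
        exit 1
    fi
    sleep 3
done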
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_basic_avro.cli.10210.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-simple-basic-avro-15728?protocol=simple&encoding-format=avro' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_basic_avro/conf/changefeed.toml
[Pipeline] // stage
Changefeed remove successfully.
ID: changefeed-error
CheckpointTs: 449545384393965572
SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
[Pipeline] }
[2024/05/05 11:26:05.724 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:05.729 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:05.749 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:05.935 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
wait process cdc.test exit for 1-th time...
[2024/05/05 11:26:06.042 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:06.118 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
Verifying downstream PD is started...
[2024/05/05 11:26:06.234 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:06.241 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:06.338 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:06.342 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:06.344 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:06.347 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:06.402 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:06.412 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:06.415 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:06.437 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:06.457 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:06.458 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
check diff failed 1-th time, retry later
[2024/05/05 11:26:06.506 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:06.559 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:06.634 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:06.658 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
wait process cdc.test exit for 2-th time...
table multi_capture_1.usertable exists
table multi_capture_2.usertable exists
table multi_capture_3.usertable not exists for 1-th check, retry later
[2024/05/05 11:26:06.741 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:06.828 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:06.930 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:06.934 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:06.935 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:06.942 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
wait process cdc.test exit for 3-th time...
Create changefeed successfully!
ID: 6db78db8-0c68-4f9b-8cd2-94e9999e6b94
Info: {"upstream_id":7365351579451726366,"namespace":"default","id":"6db78db8-0c68-4f9b-8cd2-94e9999e6b94","sink_uri":"kafka://127.0.0.1:9092/ticdc-simple-basic-avro-15728?protocol=simple\u0026encoding-format=avro","create_time":"2024-05-05T11:26:06.940844627+08:00","start_ts":449545388911493125,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"correctness","corruption_handle_level":"error"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545388911493125,"checkpoint_ts":449545388911493125,"checkpoint_time":"2024-05-05 11:26:05.855"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
+ set +x
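Judging from the Info JSON printed above (integrity_check_level is "correctness" and corruption_handle_level is "error", unlike the defaults seen on the other changefeeds), the changefeed.toml passed to `cli changefeed create` turns on integrity checking for the simple/avro encoder. A plausible minimal config with that effect, written the way the harness might generate it (the exact file contents are an assumption inferred from the JSON):

cat > changefeed.toml <<'EOF'
# Enable end-to-end checksum verification for the simple protocol with Avro.
[integrity]
integrity-check-level = "correctness"
corruption-handle-level = "error"
EOF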
check_no_changefeed 127.0.0.1:2379
parse error: Invalid numeric literal at line 1, column 6
run task successfully
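check_no_changefeed still reports success despite the jq parse error: the CLI evidently printed non-JSON output for the removed changefeed, and the helper only cares that no changefeed entry turns up. A hedged approximation of such a check (the real helper in the test suite may be written quite differently):

check_no_changefeed() {
    local pd=$1
    # An empty list (or a non-JSON error from the CLI) both count as "no changefeed".
    local count
    count=$(cdc cli changefeed list --pd="http://$pd" 2>&1 | jq 'length' 2>/dev/null || echo 0)
    [[ "$count" -eq 0 ]] || exit 1
    echo "run task successfully"
}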
[2024/05/05 11:26:07.008 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:07.030 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:07.031 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:07.121 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:07.133 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:07.142 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:07.207 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:07.245 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:07.334 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:07.351 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:07.409 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
wait process cdc.test exit for 4-th time...
find: '/tmp/tidb_cdc_test/*/*': No such file or directory
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sink_hang/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:26:06 CST 2024] <<<<<< run test case sink_hang success! >>>>>>
[2024/05/05 11:26:07.517 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:07.605 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:07.610 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:07.616 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:07.623 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:07.636 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:07.650 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:07.651 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
wait process cdc.test exit for 1-th time...
[2024/05/05 11:26:07.739 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:07.739 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:07.815 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:07.837 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:07.906 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
wait process cdc.test exit for 5-th time...
[2024/05/05 11:26:08.002 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:08.009 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:08.027 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:08.128 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:08.231 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:08.235 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:08.240 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:08.241 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
wait process cdc.test exit for 2-th time...
start tidb cluster in /tmp/tidb_cdc_test/many_pk_or_uk
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
check diff failed 2-th time, retry later
[2024/05/05 11:26:08.252 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:08.314 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:08.329 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:08.411 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:08.416 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:08.439 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:08.452 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
wait process cdc.test exit for 6-th time...
[2024/05/05 11:26:08.536 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:08.604 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:08.614 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:08.630 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
table multi_capture_3.usertable exists
table multi_capture_4.usertable not exists for 1-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/owner
+ message=owner
++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only
+ info=
+ [[ '' =~ owner ]]
+ echo 'check pass'
check pass
+ exit 0
run task successfully
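check_etcd_meta_not_exist is simpler: list the keys under a prefix with etcdctl and fail if any of them match the given pattern. Reconstructed from the expansion above (again a sketch, not the verbatim helper):

check_etcd_meta_not_exist() {
    local key_prefix=$1 message=$2
    local info
    info=$(etcdctl get "$key_prefix" --prefix --keys-only)
    if [[ "$info" =~ $message ]]; then
        echo "'$message' still present under $key_prefix"
        exit 1
    fi
    echo 'check pass'
}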
[Sun May  5 11:26:08 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.64416443.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
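This restart re-arms the failpoint machinery: GO_FAILPOINTS tells the instrumented cdc.test binary to make github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError return true, which is what later drives the changefeed into the ErrExecDDLFailed warning. The general pattern, condensed from the trace ($WORK_DIR stands in for /tmp/tidb_cdc_test/changefeed_error; backgrounding with & is assumed):

# Run the instrumented cdc binary with a failpoint enabled, in the background.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/InjectChangefeedDDLError=return(true)'
cdc.test -test.coverprofile="$WORK_DIR/cov.changefeed_error.out" server \
    --log-file "$WORK_DIR/cdc.log" --log-level debug \
    --data-dir "$WORK_DIR/cdc_data" --cluster-id default &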
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[2024/05/05 11:26:08.829 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:08.837 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:08.847 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:08.849 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:08.925 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:08.935 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:25:56.778 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=]
[2024/05/05 11:25:56.778 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365351069860358412]
[2024/05/05 11:25:56.778 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE]
[2024/05/05 11:25:56.778 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379]
[2024/05/05 11:25:56.779 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global]
[2024/05/05 11:25:56.779 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE]
[2024/05/05 11:25:56.780 +08:00] [INFO] [pd_service_discovery.go:1016] ["[pd] switch leader"] [new-leader=http://127.0.0.1:2379] [old-leader=]
[2024/05/05 11:25:56.780 +08:00] [INFO] [pd_service_discovery.go:498] ["[pd] init cluster id"] [cluster-id=7365351069860358412]
[2024/05/05 11:25:56.780 +08:00] [INFO] [client.go:606] ["[pd] changing service mode"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE]
[2024/05/05 11:25:56.780 +08:00] [INFO] [tso_client.go:236] ["[tso] switch dc tso global allocator serving url"] [dc-location=global] [new-url=http://127.0.0.1:2379]
[2024/05/05 11:25:56.781 +08:00] [INFO] [tso_dispatcher.go:359] ["[tso] tso dispatcher created"] [dc-location=global]
[2024/05/05 11:25:56.781 +08:00] [INFO] [client.go:612] ["[pd] service mode changed"] [old-mode=UNKNOWN_SVC_MODE] [new-mode=PD_SVC_MODE]
[2024/05/05 11:25:56.781 +08:00] [INFO] [tikv_driver.go:197] ["using API V1."]
[2024/05/05 11:25:56.782 +08:00] [INFO] [main.go:180] ["genLock started"]
[2024/05/05 11:25:56.782 +08:00] [INFO] [store_cache.go:477] ["change store resolve state"] [store=3] [addr=127.0.0.1:20161] [from=unresolved] [to=resolved] [liveness-state=reachable]
[2024/05/05 11:26:06.793 +08:00] [INFO] [main.go:196] ["genLock done"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [pd_service_discovery.go:550] ["[pd] exit member loop due to context canceled"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [resource_manager_client.go:295] ["[resource manager] exit resource token dispatcher"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_dispatcher.go:214] ["exit tso requests cancel loop"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_dispatcher.go:455] ["[tso] stop fetching the pending tso requests due to context canceled"] [dc-location=global]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_client.go:140] ["closing tso client"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_dispatcher.go:268] ["exit tso dispatcher loop"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_dispatcher.go:380] ["[tso] exit tso dispatcher"] [dc-location=global]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_client.go:145] ["close tso client"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_batch_controller.go:158] ["[pd] clear the tso batch controller"] [max-batch-size=10000] [best-batch-size=1] [collected-request-count=0] [pending-request-count=0]
[2024/05/05 11:26:06.793 +08:00] [INFO] [tso_client.go:155] ["tso client is closed"]
[2024/05/05 11:26:06.793 +08:00] [INFO] [pd_service_discovery.go:637] ["[pd] close pd service discovery client"]
wait process cdc.test exit for 7-th time...
[2024/05/05 11:26:09.027 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:09.033 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:09.051 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:09.114 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:09.206 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:09.210 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:09.222 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:09.240 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
cdc.test: no process found
wait process cdc.test exit for 8-th time...
process cdc.test already exit
[Sun May  5 11:26:09 CST 2024] <<<<<< run test case ddl_manager success! >>>>>>
[2024/05/05 11:26:09.335 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:09.346 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:09.422 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:09.427 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:09.440 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:09.512 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:09.546 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:09.611 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:09.636 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:09.650 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:09.814 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:09.820 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:09.867 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:09.901 +08:00] [INFO] [main.go:178] ["73 insert success: 1100"]
[2024/05/05 11:26:09.933 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:09.942 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
[2024/05/05 11:26:09.946 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:10.018 +08:00] [INFO] [main.go:178] ["72 insert success: 1100"]
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:10.041 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:10.117 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:10.138 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:10.229 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
check diff failed 3-th time, retry later
[2024/05/05 11:26:10.353 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
[2024/05/05 11:26:10.425 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:10.502 +08:00] [INFO] [main.go:178] ["73 insert success: 1200"]
table multi_capture_4.usertable exists
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:10.617 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:10.627 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:10.642 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:10.664 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:10.666 +08:00] [INFO] [main.go:178] ["72 insert success: 1200"]
[2024/05/05 11:26:10.729 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:10.782 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
<<< Run all test success >>>
check diff failed 1-th time, retry later
[Pipeline] }
[2024/05/05 11:26:10.830 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:10.855 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:10.859 +08:00] [INFO] [main.go:178] ["73 insert success: 1300"]
[2024/05/05 11:26:10.960 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:26:11.017 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:11.039 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:11.126 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:11.126 +08:00] [INFO] [main.go:178] ["72 insert success: 1300"]
[2024/05/05 11:26:11.170 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:11.260 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:11.300 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:11.333 +08:00] [INFO] [main.go:178] ["73 insert success: 1400"]
[2024/05/05 11:26:11.371 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:26:11.383 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:11.445 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs2548cefe_1695_42d1_ae96_c438802b1fd0"]
[2024/05/05 11:26:11.451 +08:00] [INFO] [main.go:178] ["72 insert success: 1400"]
[2024/05/05 11:26:11.531 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs1a7c1510_f10d_4aa7_a0dc_58d3b34efcda"]
[2024/05/05 11:26:11.686 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:11.708 +08:00] [INFO] [main.go:178] ["73 insert success: 1500"]
[2024/05/05 11:26:11.750 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsa651f0e9_2930_43f8_97fb_e8d96c76b70a"]
[2024/05/05 11:26:11.785 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:11.787 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:11.791 +08:00] [INFO] [main.go:178] ["72 insert success: 1500"]
[2024/05/05 11:26:11.843 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:11.844 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:11 GMT
< Content-Length: 883
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c2551cbe-5817-491f-8c8d-a8c2d40c1db0
	{"id":"c2551cbe-5817-491f-8c8d-a8c2d40c1db0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879569}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	4

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b234
	c2551cbe-5817-491f-8c8d-a8c2d40c1db0

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c2551cbe-5817-491f-8c8d-a8c2d40c1db0
	{"id":"c2551cbe-5817-491f-8c8d-a8c2d40c1db0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879569}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	4

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b234
	c2551cbe-5817-491f-8c8d-a8c2d40c1db0

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c2551cbe-5817-491f-8c8d-a8c2d40c1db0
	{"id":"c2551cbe-5817-491f-8c8d-a8c2d40c1db0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879569}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	4

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b234
	c2551cbe-5817-491f-8c8d-a8c2d40c1db0

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6494.out cli changefeed create --start-ts=449545380868128769 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-1
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // cache
[2024/05/05 11:26:12.124 +08:00] [INFO] [main.go:178] ["73 insert success: 1600"]
[2024/05/05 11:26:12.168 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:12.229 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:12.229 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:12.248 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:12.277 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:12.277 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
Create changefeed successfully!
ID: changefeed-error-1
Info: {"upstream_id":7365351471431534517,"namespace":"default","id":"changefeed-error-1","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:12.182686179+08:00","start_ts":449545380868128769,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545380868128769,"checkpoint_ts":449545380868128769,"checkpoint_time":"2024-05-05 11:25:35.172"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[Pipeline] }
table test.t2 not exists for 1-th check, retry later
[Pipeline] // dir
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Pipeline] }
[2024/05/05 11:26:12.515 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLsb2ee985c_c555_4857_8529_ec554cea4159"]
[2024/05/05 11:26:12.517 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // withCredentials
[Pipeline] }
check diff successfully
[Pipeline] // timeout
[2024/05/05 11:26:12.603 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:12.615 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:12.632 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[2024/05/05 11:26:12.661 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:12.666 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:12.738 +08:00] [INFO] [main.go:835] ["running ddl test: testMultiDDLs169369f0_e943_4a2e_bbe2_4a9e159ad4c7"]
[2024/05/05 11:26:12.823 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
check diff failed 4-th time, retry later
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
***************** properties *****************
"mysql.port"="4000"
"dotransactions"="false"
"readallfields"="true"
"mysql.user"="root"
"recordcount"="20"
"threadcount"="2"
"updateproportion"="0"
"operationcount"="0"
"readproportion"="0"
"requestdistribution"="uniform"
"insertproportion"="0"
"workload"="core"
"mysql.db"="multi_capture_1"
"scanproportion"="0"
"mysql.host"="127.0.0.1"
**********************************************
Run finished, takes 11.00554ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 2063.1, Avg(us): 1046, Min(us): 537, Max(us): 1683, 95th(us): 2000, 99th(us): 2000
***************** properties *****************
"threadcount"="2"
"mysql.db"="multi_capture_2"
"readallfields"="true"
"mysql.user"="root"
"insertproportion"="0"
"workload"="core"
"updateproportion"="0"
"dotransactions"="false"
"recordcount"="20"
"operationcount"="0"
"readproportion"="0"
"mysql.host"="127.0.0.1"
"mysql.port"="4000"
"scanproportion"="0"
"requestdistribution"="uniform"
**********************************************
Run finished, takes 9.476201ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 2404.7, Avg(us): 889, Min(us): 495, Max(us): 1532, 95th(us): 2000, 99th(us): 2000
***************** properties *****************
"mysql.port"="4000"
"dotransactions"="false"
"readallfields"="true"
"workload"="core"
"scanproportion"="0"
"mysql.host"="127.0.0.1"
"readproportion"="0"
"mysql.db"="multi_capture_3"
"insertproportion"="0"
"mysql.user"="root"
"requestdistribution"="uniform"
"recordcount"="20"
"operationcount"="0"
"threadcount"="2"
"updateproportion"="0"
**********************************************
Run finished, takes 13.213872ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 1739.7, Avg(us): 1273, Min(us): 484, Max(us): 4442, 95th(us): 5000, 99th(us): 5000
***************** properties *****************
"requestdistribution"="uniform"
"mysql.db"="multi_capture_4"
"readallfields"="true"
"dotransactions"="false"
"threadcount"="2"
"scanproportion"="0"
"insertproportion"="0"
"readproportion"="0"
"mysql.port"="4000"
"mysql.host"="127.0.0.1"
"workload"="core"
"mysql.user"="root"
"updateproportion"="0"
"operationcount"="0"
"recordcount"="20"
**********************************************
Run finished, takes 10.217418ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 2260.2, Avg(us): 971, Min(us): 416, Max(us): 1637, 95th(us): 2000, 99th(us): 2000
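The four property dumps above come from go-ycsb loading 20 rows into each of multi_capture_1 through multi_capture_4. The per-database invocation is roughly the following (the workload file path and $CUR variable are assumptions; the -p overrides mirror the printed properties):

for i in 1 2 3 4; do
    go-ycsb load mysql -P "$CUR/conf/workload" \
        -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
        -p mysql.db="multi_capture_$i" \
        -p recordcount=20 -p threadcount=2
done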
check diff failed 1-th time, retry later
[2024/05/05 11:26:12.830 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:12.834 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:12.916 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:12.936 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:12.953 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:13.025 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:13.027 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:13.208 +08:00] [INFO] [main.go:178] ["73 insert success: 100"]
[2024/05/05 11:26:13.218 +08:00] [INFO] [main.go:178] ["72 insert success: 100"]
[2024/05/05 11:26:13.300 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:13.311 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:13.321 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[2024/05/05 11:26:13.346 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:13.379 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:13.386 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:13.404 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:13.413 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:13.500 +08:00] [INFO] [main.go:178] ["73 insert success: 200"]
[2024/05/05 11:26:13.524 +08:00] [INFO] [main.go:178] ["72 insert success: 200"]
[Pipeline] // withEnv
[Pipeline] }
[Sun May  5 11:26:13 CST 2024] <<<<<< START kafka consumer in kafka_simple_basic_avro case >>>>>>
[Pipeline] // stage
+ set +x
[Pipeline] }
[2024/05/05 11:26:13.628 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:13.637 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
[2024/05/05 11:26:13.654 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:13.675 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:13.729 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:13.739 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:13.747 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:13.762 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:13.811 +08:00] [INFO] [main.go:178] ["73 insert success: 300"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:13.856 +08:00] [INFO] [main.go:178] ["72 insert success: 300"]
[2024/05/05 11:26:13.964 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:13.972 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:14.035 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:14.039 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
check_changefeed_status 127.0.0.1:8300 changefeed-error-1 warning last_warning ErrExecDDLFailed
+ endpoint=127.0.0.1:8300
+ changefeed_id=changefeed-error-1
+ expected_state=warning
+ field=last_warning
+ error_pattern=ErrExecDDLFailed
++ curl 127.0.0.1:8300/api/v2/changefeeds/changefeed-error-1/status
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
[2024/05/05 11:26:14.106 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:14.107 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:14.120 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:14.136 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:14.139 +08:00] [INFO] [main.go:178] ["73 insert success: 400"]
[2024/05/05 11:26:14.199 +08:00] [INFO] [main.go:178] ["72 insert success: 400"]
[2024/05/05 11:26:14.302 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:14.336 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   252  100   252    0     0   1703      0 --:--:-- --:--:-- --:--:--  1714
+ info='{"state":"warning","resolved_ts":449545381248499738,"checkpoint_ts":449545381248499738,"last_warning":{"time":"2024-05-05T11:26:13.85649954+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}'
+ echo '{"state":"warning","resolved_ts":449545381248499738,"checkpoint_ts":449545381248499738,"last_warning":{"time":"2024-05-05T11:26:13.85649954+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}'
{"state":"warning","resolved_ts":449545381248499738,"checkpoint_ts":449545381248499738,"last_warning":{"time":"2024-05-05T11:26:13.85649954+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec DDL failed"}}
++ echo '{"state":"warning","resolved_ts":449545381248499738,"checkpoint_ts":449545381248499738,"last_warning":{"time":"2024-05-05T11:26:13.85649954+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \w\a\r\n\i\n\g ]]
+ [[ -z last_warning ]]
++ echo '{"state":"warning","resolved_ts":449545381248499738,"checkpoint_ts":449545381248499738,"last_warning":{"time":"2024-05-05T11:26:13.85649954+08:00","addr":"127.0.0.1:8300","code":"CDC:ErrExecDDLFailed","message":"[CDC:ErrExecDDLFailed]exec' DDL 'failed"}}'
++ jq -r .last_warning.message
+ error_msg='[CDC:ErrExecDDLFailed]exec DDL failed'
+ [[ ! [CDC:ErrExecDDLFailed]exec DDL failed =~ ErrExecDDLFailed ]]
run task successfully
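check_changefeed_status goes through the HTTP open API rather than the CLI: it fetches /api/v2/changefeeds/<id>/status, asserts on .state, and, when a field name is supplied, checks that field's error message against the pattern. A sketch following the argument order in the trace (the body is reconstructed, not the verbatim helper):

check_changefeed_status() {
    local endpoint=$1 changefeed_id=$2 expected_state=$3 field=$4 error_pattern=$5
    local info state error_msg
    info=$(curl -s "http://$endpoint/api/v2/changefeeds/$changefeed_id/status")
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "expected state $expected_state, got $state"
        exit 1
    fi
    if [[ -n "$field" ]]; then
        error_msg=$(echo "$info" | jq -r ".$field.message")
        if [[ ! "$error_msg" =~ $error_pattern ]]; then
            echo "expected error matching $error_pattern, got $error_msg"
            exit 1
        fi
    fi
    echo "run task successfully"
}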
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6545.out cli changefeed remove -c changefeed-error-1
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:14.374 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:14.402 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
[2024/05/05 11:26:14.469 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:14.480 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:14.509 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:14.516 +08:00] [INFO] [main.go:178] ["73 insert success: 500"]
[2024/05/05 11:26:14.527 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:14.552 +08:00] [INFO] [main.go:178] ["72 insert success: 500"]
table test.t2 not exists for 2-th check, retry later
check diff failed 5-th time, retry later
[2024/05/05 11:26:14.609 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:14.644 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:14.713 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:14.724 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:14.810 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:14.855 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:14.858 +08:00] [INFO] [main.go:178] ["73 insert success: 600"]
[2024/05/05 11:26:14.874 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:14.894 +08:00] [INFO] [main.go:178] ["72 insert success: 600"]
[2024/05/05 11:26:14.919 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:14.952 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:15.025 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:15.122 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:15.174 +08:00] [INFO] [main.go:178] ["73 insert success: 700"]
[2024/05/05 11:26:15.207 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:15.228 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:15.247 +08:00] [INFO] [main.go:178] ["72 insert success: 700"]
[2024/05/05 11:26:15.253 +08:00] [INFO] [main.go:178] ["73 insert success: 1000"]
[2024/05/05 11:26:15.282 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:15.360 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
Changefeed remove successfully.
ID: changefeed-error-1
CheckpointTs: 449545381248499738
SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
check diff successfully
[2024/05/05 11:26:15.489 +08:00] [INFO] [main.go:178] ["73 insert success: 800"]
[2024/05/05 11:26:15.506 +08:00] [INFO] [main.go:178] ["72 insert success: 1000"]
[2024/05/05 11:26:15.544 +08:00] [INFO] [main.go:178] ["72 insert success: 800"]
[2024/05/05 11:26:15.578 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
wait process cdc.test exit for 1-th time...
[2024/05/05 11:26:15.667 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
[2024/05/05 11:26:15.780 +08:00] [INFO] [main.go:178] ["73 insert success: 900"]
[2024/05/05 11:26:15.859 +08:00] [INFO] [main.go:178] ["72 insert success: 900"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26cb880019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:1408, start at 2024-05-05 11:26:14.799095789 +0800 CST m=+5.245329682	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:14.805 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:14.805 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:14.805 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:26:16.107 +08:00] [INFO] [main.go:812] ["testMultiDDLs take 44.299518223s"]
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 2-th time...
[2024/05/05 11:26:16.191 +08:00] [INFO] [main.go:74] ["DefaultValue integration tests take 44.383555832s"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26d3b00009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:1291, start at 2024-05-05 11:26:15.286109136 +0800 CST m=+5.192098017	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.293 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.276 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.276 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.t2 not exists for 3-th check, retry later
check diff successfully
table mark.finish_mark_1 not exists for 1-th check, retry later
+ set +x
wait process cdc.test exit for 3-th time...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26da680014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:1411, start at 2024-05-05 11:26:15.744372321 +0800 CST m=+5.096608962	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.750 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.756 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.756 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26da680014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:1411, start at 2024-05-05 11:26:15.744372321 +0800 CST m=+5.096608962	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.750 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.756 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.756 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26dc9c0016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:1502, start at 2024-05-05 11:26:15.884396659 +0800 CST m=+5.179504616	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.892 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.897 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.897 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/changefeed_finish/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_finish/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process cdc.test exit for 1-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:26:17 CST 2024] <<<<<< run test case multi_capture success! >>>>>>
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26cb880019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:1408, start at 2024-05-05 11:26:14.799095789 +0800 CST m=+5.245329682	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:14.805 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:14.805 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:14.805 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26cd0c0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:1480, start at 2024-05-05 11:26:14.881922681 +0800 CST m=+5.269555493	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:14.888 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:14.851 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:14.851 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
wait process cdc.test exit for 2-th time...
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/new_ci_collation/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/new_ci_collation/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
check_etcd_meta_not_exist '/tidb/cdc/default/__cdc_meta__/owner' 'owner'
+ key_prefix=/tidb/cdc/default/__cdc_meta__/owner
+ message=owner
++ etcdctl get /tidb/cdc/default/__cdc_meta__/owner --prefix --keys-only
+ info=
+ [[ '' =~ owner ]]
+ echo 'check pass'
check pass
+ exit 0
run task successfully
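The check_etcd_meta_not_exist trace above is how the harness proves the previous cdc owner key has been cleaned out of etcd before restarting the server in the changefeed_error case. A minimal standalone sketch of that check, reconstructed from the trace (the etcd endpoint is whatever etcdctl already points at in the test environment):

#!/usr/bin/env bash
# Sketch of check_etcd_meta_not_exist as traced above: fail if any key under
# the CDC owner election prefix is still present in etcd.
key_prefix='/tidb/cdc/default/__cdc_meta__/owner'
message='owner'
info=$(etcdctl get "$key_prefix" --prefix --keys-only)
if [[ "$info" =~ $message ]]; then
    echo "check failed: an $message key still exists under $key_prefix"
    exit 1
fi
echo 'check pass'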
[Sun May  5 11:26:18 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>>
table test.t2 not exists for 4-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26d3b00009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:1291, start at 2024-05-05 11:26:15.286109136 +0800 CST m=+5.192098017	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.293 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.276 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.276 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b26d4500015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:1374, start at 2024-05-05 11:26:15.355091843 +0800 CST m=+5.207925457	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:15.362 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:15.366 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:15.366 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/tiflash/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/tiflash/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/tiflash/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/pkg/txnutil/gc/InjectActualGCSafePoint=return(9223372036854775807)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.66226624.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
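The curl block above is the harness waiting for the cdc server it just launched (here with GO_FAILPOINTS pinning the GC safepoint to 9223372036854775807, which is what later forces the ErrSnapshotLostByGC state) to answer on 127.0.0.1:8300. It polls /debug/info up to 50 times: output containing "failed to get info:" is treated as an error, output containing "etcd info" means the capture has registered and the loop can stop. A condensed sketch of that readiness loop, reconstructed from the trace:

# Readiness poll as traced above: endpoint and basic-auth credentials are the
# ones printed in the log; 50 attempts with sleep 3 allows roughly 150s.
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo 'cdc server reported an error'; exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        break                     # capture is registered in etcd, server is ready
    fi
    if (( i == 50 )); then
        echo 'cdc server failed to start in time'; exit 1
    fi
    sleep 3
done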
check diff failed 1-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_1 not exists for 2-th check, retry later
[Sun May  5 11:26:19 CST 2024] <<<<<< START cdc server in changefeed_finish case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_finish.28132815.out server --log-file /tmp/tidb_cdc_test/changefeed_finish/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_finish/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2721480014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:1459, start at 2024-05-05 11:26:20.286972613 +0800 CST m=+6.044550901	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:20.298 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:20.293 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:20.293 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.t2 not exists for 5-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.cli.2838.out cli tso query --pd=http://127.0.0.1:2379
check diff failed 2-th time, retry later
table mark.finish_mark_1 not exists for 3-th check, retry later
[Sun May  5 11:26:20 CST 2024] <<<<<< START cdc server in new_ci_collation case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.new_ci_collation.28582860.out server --log-file /tmp/tidb_cdc_test/new_ci_collation/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/new_ci_collation/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:21 GMT
< Content-Length: 883
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c0790b3f-baaa-485a-a1ae-4f560d49c84e
	{"id":"c0790b3f-baaa-485a-a1ae-4f560d49c84e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879578}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	5

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b2b5
	c0790b3f-baaa-485a-a1ae-4f560d49c84e

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c0790b3f-baaa-485a-a1ae-4f560d49c84e
	{"id":"c0790b3f-baaa-485a-a1ae-4f560d49c84e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879578}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	5

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b2b5
	c0790b3f-baaa-485a-a1ae-4f560d49c84e

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c0790b3f-baaa-485a-a1ae-4f560d49c84e
	{"id":"c0790b3f-baaa-485a-a1ae-4f560d49c84e","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879578}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	5

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b2b5
	c0790b3f-baaa-485a-a1ae-4f560d49c84e

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6680.out cli changefeed create --start-ts=449545380868128769 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-error-2
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_source/run.sh using Sink-Type: kafka... <<=================
Create changefeed successfully!
ID: changefeed-error-2
Info: {"upstream_id":7365351471431534517,"namespace":"default","id":"changefeed-error-2","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:21.743247041+08:00","start_ts":449545380868128769,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545380868128769,"checkpoint_ts":449545380868128769,"checkpoint_time":"2024-05-05 11:25:35.172"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
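changefeed-error-2 above is created against the Kafka sink, with the protocol, partition count, broker version, and message-size cap all carried as query parameters of the sink URI, and with --start-ts pinned to the old checkpoint so that the injected GC safepoint can reject it. A hedged sketch of that create call using the values printed in the trace (the explicit --pd flag is an assumption here; in the trace the PD address is supplied by the harness wrapper):

# Sketch of the changefeed creation traced above; all values copied from the log.
SINK_URI='kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --start-ts=449545380868128769 \
    --sink-uri="$SINK_URI" \
    -c changefeed-error-2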
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
table test1.finishmark exists
[2024/05/05 11:26:12.694 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=3030946575]
[2024/05/05 11:26:12.696 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t2] [checkSum=718014124]
[2024/05/05 11:26:12.698 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t3] [checkSum=718014124]
[2024/05/05 11:26:12.704 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.finishmark] [checkSum=0]
[2024/05/05 11:26:12.706 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.t1] [checkSum=718014124]
[2024/05/05 11:26:12.706 +08:00] [INFO] [main.go:107] ["get checksum for the upstream success"] [elapsed=15.2452ms]
[2024/05/05 11:26:12.710 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=3030946575]
[2024/05/05 11:26:12.712 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t2] [checkSum=718014124]
[2024/05/05 11:26:12.714 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t3] [checkSum=718014124]
[2024/05/05 11:26:12.719 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.finishmark] [checkSum=0]
[2024/05/05 11:26:12.721 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test1.t1] [checkSum=718014124]
[2024/05/05 11:26:12.721 +08:00] [INFO] [main.go:116] ["get checksum for the downstream success"] [elapsed=14.700002ms]
[2024/05/05 11:26:12.721 +08:00] [INFO] [main.go:95] ["compare checksum passed"]
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:14 CST 2024] <<<<<< run test case kafka_column_selector success! >>>>>>
+ set +x
+ tso='449545392822943745
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545392822943745 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
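The tso= assignment above captures the entire stdout of the cli invocation, i.e. the TSO followed by the coverage footer of the instrumented cdc.test binary, and the harness keeps only the first whitespace-separated field as the start-ts. A minimal equivalent, assuming a plain cdc binary and the same PD address as in the trace:

# Sketch of the start-ts extraction traced above: query a TSO from PD and drop
# the trailing "PASS"/"coverage" lines by taking the first field only.
tso=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso | awk -F ' ' '{print $1}')
echo "start-ts: $start_ts"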
[Sun May  5 11:26:22 CST 2024] <<<<<< START cdc server in tiflash case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.tiflash.28782880.out server --log-file /tmp/tidb_cdc_test/tiflash/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/tiflash/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:22 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a9452c9b-8d92-4efd-89e3-9db911a908a0
	{"id":"a9452c9b-8d92-4efd-89e3-9db911a908a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879579}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98eeec9
	a9452c9b-8d92-4efd-89e3-9db911a908a0

/tidb/cdc/default/default/upstream/7365351657510964239
	{"id":7365351657510964239,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a9452c9b-8d92-4efd-89e3-9db911a908a0
	{"id":"a9452c9b-8d92-4efd-89e3-9db911a908a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879579}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98eeec9
	a9452c9b-8d92-4efd-89e3-9db911a908a0

/tidb/cdc/default/default/upstream/7365351657510964239
	{"id":7365351657510964239,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a9452c9b-8d92-4efd-89e3-9db911a908a0
	{"id":"a9452c9b-8d92-4efd-89e3-9db911a908a0","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879579}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98eeec9
	a9452c9b-8d92-4efd-89e3-9db911a908a0

/tidb/cdc/default/default/upstream/7365351657510964239
	{"id":7365351657510964239,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:26:22 CST 2024] <<<<<< START kafka consumer in changefeed_finish case >>>>>>
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2721480014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:1459, start at 2024-05-05 11:26:20.286972613 +0800 CST m=+6.044550901	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:20.298 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:20.293 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:20.293 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2721700013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:1532, start at 2024-05-05 11:26:20.289074689 +0800 CST m=+5.981780508	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:20.298 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:20.302 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:20.302 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/many_pk_or_uk/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.t2 not exists for 6-th check, retry later
check diff failed 3-th time, retry later
table mark.finish_mark_1 not exists for 4-th check, retry later
check diff failed 1-th time, retry later
+ set +x
check_changefeed_state http://127.0.0.1:2379 changefeed-error-2 failed [CDC:ErrSnapshotLostByGC]
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-error-2
+ expected_state=failed
+ error_msg='[CDC:ErrSnapshotLostByGC]'
+ tls_dir='[CDC:ErrSnapshotLostByGC]'
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-error-2 -s
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error-2",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:26:21.826781119+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSnapshotLostByGC",
    "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at 9223372036854775807"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error-2",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:26:21.826781119+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSnapshotLostByGC",
    "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at 9223372036854775807"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-error-2",
  "state": "failed",
  "checkpoint_tso": 449545380868128769,
  "checkpoint_time": "2024-05-05 11:25:35.172",
  "error": {
    "time": "2024-05-05T11:26:21.826781119+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrSnapshotLostByGC",
    "message": "[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at 9223372036854775807"
  }
}
++ jq -r .state
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449545380868128769, '"checkpoint_time":' '"2024-05-05' '11:25:35.172",' '"error":' '{' '"time":' '"2024-05-05T11:26:21.826781119+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}'
+ state=failed
+ [[ ! failed == \f\a\i\l\e\d ]]
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-error-2",' '"state":' '"failed",' '"checkpoint_tso":' 449545380868128769, '"checkpoint_time":' '"2024-05-05' '11:25:35.172",' '"error":' '{' '"time":' '"2024-05-05T11:26:21.826781119+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrSnapshotLostByGC",' '"message":' '"[CDC:ErrSnapshotLostByGC]fail' to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at '9223372036854775807"' '}' '}'
++ jq -r .error.message
+ message='[CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at 9223372036854775807'
+ [[ ! [CDC:ErrSnapshotLostByGC]fail to create or maintain changefeed due to snapshot loss caused by GC. checkpoint-ts 449545380868128769 is earlier than or equal to GC safepoint at 9223372036854775807 =~ \[CDC:ErrSnapshotLostByGC] ]]
run task successfully
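The check_changefeed_state trace above queries the changefeed over the cdc CLI and asserts on the state and error.message fields with jq: after the InjectActualGCSafePoint failpoint, changefeed-error-2 must be "failed" with an ErrSnapshotLostByGC error. A compact sketch of the same assertion, using the endpoint, changefeed id, and expected values printed in the trace:

# Sketch of check_changefeed_state as traced above.
endpoints=http://127.0.0.1:2379
changefeed_id=changefeed-error-2
expected_state=failed
error_msg='[CDC:ErrSnapshotLostByGC]'
info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
state=$(echo "$info" | jq -r .state)
message=$(echo "$info" | jq -r .error.message)
if [[ "$state" != "$expected_state" ]]; then
    echo "unexpected changefeed state: $state"; exit 1
fi
if [[ "$message" != *"$error_msg"* ]]; then
    echo "unexpected error message: $message"; exit 1
fi
echo 'run task successfully'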
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6766.out cli changefeed remove -c changefeed-error-2
The 1 times to try to start tidb cluster...
Changefeed remove successfully.
ID: changefeed-error-2
CheckpointTs: 449545380868128769
SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:24 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d6fcd068-06e8-4305-9b1c-5c7bfb879c51
	{"id":"d6fcd068-06e8-4305-9b1c-5c7bfb879c51","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879581}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c988adcf
	d6fcd068-06e8-4305-9b1c-5c7bfb879c51

/tidb/cdc/default/default/upstream/7365351652572687333
	{"id":7365351652572687333,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d6fcd068-06e8-4305-9b1c-5c7bfb879c51
	{"id":"d6fcd068-06e8-4305-9b1c-5c7bfb879c51","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879581}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c988adcf
	d6fcd068-06e8-4305-9b1c-5c7bfb879c51

/tidb/cdc/default/default/upstream/7365351652572687333
	{"id":7365351652572687333,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d6fcd068-06e8-4305-9b1c-5c7bfb879c51
	{"id":"d6fcd068-06e8-4305-9b1c-5c7bfb879c51","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879581}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c988adcf
	d6fcd068-06e8-4305-9b1c-5c7bfb879c51

/tidb/cdc/default/default/upstream/7365351652572687333
	{"id":7365351652572687333,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
table test.t2 not exists for 7-th check, retry later
check diff failed 4-th time, retry later
table mark.finish_mark_1 not exists for 5-th check, retry later
Create changefeed successfully!
ID: cd56bb17-5122-4879-8b6d-912cfe247645
Info: {"upstream_id":7365351652572687333,"namespace":"default","id":"cd56bb17-5122-4879-8b6d-912cfe247645","sink_uri":"kafka://127.0.0.1:9092/ticdc-new_ci_collation-test-7116?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:24.56420678+08:00","start_ts":449545392869867521,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545392869867521,"checkpoint_ts":449545392869867521,"checkpoint_time":"2024-05-05 11:26:20.955"}
[Sun May  5 11:26:24 CST 2024] <<<<<< START kafka consumer in new_ci_collation case >>>>>>
check diff failed 2-th time, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2867.out cli tso query --pd=http://127.0.0.1:2379
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:25 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0982a6a9-8e1e-471d-a09d-23d0ceb2c491
	{"id":"0982a6a9-8e1e-471d-a09d-23d0ceb2c491","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879582}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98ab6d8
	0982a6a9-8e1e-471d-a09d-23d0ceb2c491

/tidb/cdc/default/default/upstream/7365351644237022491
	{"id":7365351644237022491,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0982a6a9-8e1e-471d-a09d-23d0ceb2c491
	{"id":"0982a6a9-8e1e-471d-a09d-23d0ceb2c491","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879582}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98ab6d8
	0982a6a9-8e1e-471d-a09d-23d0ceb2c491

/tidb/cdc/default/default/upstream/7365351644237022491
	{"id":7365351644237022491,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0982a6a9-8e1e-471d-a09d-23d0ceb2c491
	{"id":"0982a6a9-8e1e-471d-a09d-23d0ceb2c491","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879582}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c98ab6d8
	0982a6a9-8e1e-471d-a09d-23d0ceb2c491

/tidb/cdc/default/default/upstream/7365351644237022491
	{"id":7365351644237022491,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: 22fa85b1-5bcf-47c2-a876-771ae4ad592f
Info: {"upstream_id":7365351644237022491,"namespace":"default","id":"22fa85b1-5bcf-47c2-a876-771ae4ad592f","sink_uri":"kafka://127.0.0.1:9092/ticdc-tiflash-test-28410?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:25.542011762+08:00","start_ts":449545392822943745,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545392822943745,"checkpoint_ts":449545392822943745,"checkpoint_time":"2024-05-05 11:26:20.776"}
[Sun May  5 11:26:25 CST 2024] <<<<<< START kafka consumer in tiflash case >>>>>>
+ set +x
table cdc_tiflash_test.multi_data_type not exists for 1-th check, retry later
table new_ci_collation_test.t1 not exists for 1-th check, retry later
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector/run.sh: line 1: 17189 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1  (wd: /tmp/tidb_cdc_test/kafka_column_selector)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/run.sh using Sink-Type: kafka... <<=================
Starting schema registry...
* About to connect() to 127.0.0.1 port 8088 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8088; Connection refused
* Closing connection 0
wait process cdc.test exit for 1-th time...
table test.t2 not exists for 8-th check, retry later
+ set +x
+ tso='449545393928142849
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545393928142849 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
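The block above shows how each case derives its changefeed --start-ts: 'cli tso query' prints the TSO followed by the Go test binary's PASS and coverage lines, so the script keeps only the first whitespace-separated field. A condensed sketch of that extraction (variable names are illustrative; the job actually runs the coverage-instrumented cdc.test binary):

# Query the current TSO from PD and strip the trailing PASS/coverage noise.
raw=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $raw | awk -F ' ' '{print $1}')
echo "using --start-ts=${start_ts}"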
[Sun May  5 11:26:26 CST 2024] <<<<<< START cdc server in many_pk_or_uk case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.29052907.out server --log-file /tmp/tidb_cdc_test/many_pk_or_uk/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/many_pk_or_uk/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table mark.finish_mark_1 exists
table mark.finish_mark_2 not exists for 1-th check, retry later
wait process cdc.test exit for 2-th time...
check diff failed 5-th time, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:27 CST 2024] <<<<<< START cdc server in changefeed_error case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.68166818.out server --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
check diff successfully
start tidb cluster in /tmp/tidb_cdc_test/multi_source
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table cdc_tiflash_test.multi_data_type not exists for 2-th check, retry later
table new_ci_collation_test.t1 exists
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
check data failed 1-th time, retry later
check data successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:25 CST 2024] <<<<<< run test case ddl_puller_lag success! >>>>>>
* About to connect() to 127.0.0.1 port 8088 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8088; Connection refused
* Closing connection 0
table new_ci_collation_test.t2 not exists for 1-th check, retry later
table test.t2 not exists for 9-th check, retry later
check diff successfully
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:29 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1db20a74-bdaf-4b38-b830-ece15cb4af68
	{"id":"1db20a74-bdaf-4b38-b830-ece15cb4af68","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879586}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9990ad2
	1db20a74-bdaf-4b38-b830-ece15cb4af68

/tidb/cdc/default/default/upstream/7365351675144565425
	{"id":7365351675144565425,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1db20a74-bdaf-4b38-b830-ece15cb4af68
	{"id":"1db20a74-bdaf-4b38-b830-ece15cb4af68","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879586}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9990ad2
	1db20a74-bdaf-4b38-b830-ece15cb4af68

/tidb/cdc/default/default/upstream/7365351675144565425
	{"id":7365351675144565425,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ grep -q 'etcd info'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1db20a74-bdaf-4b38-b830-ece15cb4af68
	{"id":"1db20a74-bdaf-4b38-b830-ece15cb4af68","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879586}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9990ad2
	1db20a74-bdaf-4b38-b830-ece15cb4af68

/tidb/cdc/default/default/upstream/7365351675144565425
	{"id":7365351675144565425,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.many_pk_or_uk.cli.2957.out cli changefeed create --start-ts=449545393928142849 '--sink-uri=kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-24585?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
* About to connect() to 127.0.0.1 port 8088 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8088 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8088
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:29 GMT
< Content-Type: application/vnd.schemaregistry.v1+json
< Vary: Accept-Encoding, User-Agent
< Content-Length: 2
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100    49  100    24  100    25    309    322 --:--:-- --:--:-- --:--:--   324
{"compatibility":"NONE"}The 1 times to try to start tidb cluster...
table new_ci_collation_test.t2 exists
table new_ci_collation_test.t3 not exists for 1-th check, retry later
Create changefeed successfully!
ID: 926ffe5b-ac19-4627-b0a1-c702b1a22c08
Info: {"upstream_id":7365351675144565425,"namespace":"default","id":"926ffe5b-ac19-4627-b0a1-c702b1a22c08","sink_uri":"kafka://127.0.0.1:9092/ticdc-many-pk-or-uk-test-24585?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:30.205932402+08:00","start_ts":449545393928142849,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545393928142849,"checkpoint_ts":449545393928142849,"checkpoint_time":"2024-05-05 11:26:24.992"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table cdc_tiflash_test.multi_data_type exists
check diff successfully
wait process cdc.test exit for 1-th time...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:30 GMT
< Content-Length: 883
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f5ab990a-9a08-4839-b931-06f6712f405c
	{"id":"f5ab990a-9a08-4839-b931-06f6712f405c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879587}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	7

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b315
	f5ab990a-9a08-4839-b931-06f6712f405c

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f5ab990a-9a08-4839-b931-06f6712f405c
	{"id":"f5ab990a-9a08-4839-b931-06f6712f405c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879587}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	7

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b315
	f5ab990a-9a08-4839-b931-06f6712f405c

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f5ab990a-9a08-4839-b931-06f6712f405c
	{"id":"f5ab990a-9a08-4839-b931-06f6712f405c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879587}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/meta/ticdc-delete-etcd-key-count
	7

/tidb/cdc/default/__cdc_meta__/owner/22318f46c8e4b315
	f5ab990a-9a08-4839-b931-06f6712f405c

/tidb/cdc/default/default/upstream/7365351471431534517
	{"id":7365351471431534517,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.6872.out cli changefeed create --start-ts=0 '--sink-uri=kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c changefeed-initialize-error
table test.t2 not exists for 10-th check, retry later
table mark.finish_mark_2 not exists for 2-th check, retry later
Create changefeed successfully!
ID: changefeed-initialize-error
Info: {"upstream_id":7365351471431534517,"namespace":"default","id":"changefeed-initialize-error","sink_uri":"kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:30.740290213+08:00","start_ts":449545395404275717,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545395404275717,"checkpoint_ts":449545395404275717,"checkpoint_time":"2024-05-05 11:26:30.623"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
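The changefeed-initialize-error changefeed created above belongs to the changefeed_error case, whose cdc server was started earlier with GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)'. With the pingcap/failpoint syntax, 2*return(true) fires the injected error on the first two evaluations only, which is why the checks below first see the changefeed in a warning state and later see it recover to normal. A sketch of the injection pattern (paths and flags are copied from the trace; the coverage-profile name is shortened and the backgrounding is implied rather than shown):

# Arm the redo-manager failpoint for its first two evaluations, then start
# the instrumented cdc server the same way the trace above does.
export GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/redo/ChangefeedNewRedoManagerError=2*return(true)'
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.out server \
    --log-file /tmp/tidb_cdc_test/changefeed_error/cdc.log --log-level debug \
    --data-dir /tmp/tidb_cdc_test/changefeed_error/cdc_data --cluster-id default &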
check diff failed 1-th time, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:31 CST 2024] <<<<<< run test case tiflash success! >>>>>>
table test.finish_mark not exists for 1-th check, retry later
+ set +x
[Sun May  5 11:26:31 CST 2024] <<<<<< START kafka consumer in many_pk_or_uk case >>>>>>
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading go.uber.org/zap v1.27.0
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading go.uber.org/atomic v1.11.0
table test.finish_mark not exists for 2-th check, retry later
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading google.golang.org/grpc v1.62.1
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading github.com/go-sql-driver/mysql v1.7.1
table mark.finish_mark_2 not exists for 3-th check, retry later
go: downloading github.com/golang/protobuf v1.5.4
go: downloading golang.org/x/net v0.24.0
go: downloading google.golang.org/protobuf v1.33.0
go: downloading golang.org/x/sys v0.19.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
+ set +x
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-initialize-error
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s
go: downloading golang.org/x/text v0.14.0
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449545395404275717, '"checkpoint_time":' '"2024-05-05' '11:26:30.623",' '"error":' '{' '"time":' '"2024-05-05T11:26:30.923047936+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \n\o\r\m\a\l ]]
+ echo 'changefeed state warning does not equal to normal'
changefeed state warning does not equal to normal
+ exit 1
run task failed 1-th time, retry later
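The block above is one failed iteration of check_changefeed_state: the changefeed is expected to be normal with no error, but because the injected redo-manager error has not yet been retried away, the query returns state warning, the helper exits non-zero, and the harness retries the whole task. A minimal sketch of the check logic as traced, assuming jq is available (the surrounding 'run task ... retry later' wrapper is part of the harness and not reproduced here):

check_changefeed_state() {
    local endpoints=$1 changefeed_id=$2 expected_state=$3 error_msg=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$endpoints" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "changefeed state $state does not equal to $expected_state"
        exit 1       # non-zero exit makes the harness retry the task
    fi
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$message" =~ $error_msg ]]; then
        echo "error message $message does not match $error_msg"
        exit 1
    fi
}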
table new_ci_collation_test.t3 exists
table new_ci_collation_test.t4 not exists for 1-th check, retry later
table test.t2 not exists for 11-th check, retry later
check diff failed 2-th time, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/kafka_column_selector_avro
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish_mark not exists for 3-th check, retry later
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading github.com/spf13/pflag v1.0.5
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/prometheus/client_golang v1.19.0
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading github.com/docker/go-units v0.5.0
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/stretchr/testify v1.9.0
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading github.com/scalalang2/golang-fifo v0.1.5
go: downloading github.com/tidwall/btree v1.7.0
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading github.com/google/uuid v1.6.0
go: downloading github.com/gorilla/mux v1.8.0
go: downloading github.com/google/btree v1.1.2
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading google.golang.org/api v0.170.0
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading cloud.google.com/go v0.112.2
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/dgraph-io/ristretto v0.1.1
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading gopkg.in/yaml.v3 v3.0.1
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading golang.org/x/crypto v0.22.0
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading go.uber.org/mock v0.4.0
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-initialize-error
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s
table new_ci_collation_test.t4 exists
table new_ci_collation_test.t5 not exists for 1-th check, retry later
table test.t2 not exists for 12-th check, retry later
go: downloading golang.org/x/time v0.5.0
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/cockroachdb/pebble v1.1.0
go: downloading github.com/jfcg/sorty/v2 v2.1.0
go: downloading github.com/carlmjohnson/flagext v0.21.0
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df
go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading github.com/spkg/bom v1.0.0
go: downloading github.com/xitongsys/parquet-go v1.6.0
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading github.com/robfig/cron v1.2.0
go: downloading github.com/kr/pretty v0.3.1
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel v1.24.0
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading github.com/klauspost/cpuid v1.3.1
go: downloading github.com/ncw/directio v1.0.5
go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2
go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "warning",
  "checkpoint_tso": 449545395404275717,
  "checkpoint_time": "2024-05-05 11:26:30.623",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"warning",' '"checkpoint_tso":' 449545395404275717, '"checkpoint_time":' '"2024-05-05' '11:26:30.623",' '"error":' '{' '"time":' '"2024-05-05T11:26:30.923047936+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}'
++ jq -r .state
+ state=warning
+ [[ ! warning == \n\o\r\m\a\l ]]
+ echo 'changefeed state warning does not equal to normal'
changefeed state warning does not equal to normal
+ exit 1
run task failed 2-th time, retry later
table mark.finish_mark_2 not exists for 4-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
go: downloading github.com/go-logr/logr v1.4.1
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/go-logr/stdr v1.2.2
go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06
go: downloading github.com/DataDog/zstd v1.5.5
check diff failed 3-th time, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh using Sink-Type: kafka... <<=================
+++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/run.sh
++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo
++ pwd
+ CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo
+ source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/../_utils/test_prepare
++ UP_TIDB_HOST=127.0.0.1
++ UP_TIDB_PORT=4000
++ UP_TIDB_OTHER_PORT=4001
++ UP_TIDB_STATUS=10080
++ UP_TIDB_OTHER_STATUS=10081
++ DOWN_TIDB_HOST=127.0.0.1
++ DOWN_TIDB_PORT=3306
++ DOWN_TIDB_STATUS=20080
++ TLS_TIDB_HOST=127.0.0.1
++ TLS_TIDB_PORT=3307
++ TLS_TIDB_STATUS=30080
++ UP_PD_HOST_1=127.0.0.1
++ UP_PD_PORT_1=2379
++ UP_PD_PEER_PORT_1=2380
++ UP_PD_HOST_2=127.0.0.1
++ UP_PD_PORT_2=2679
++ UP_PD_PEER_PORT_2=2680
++ UP_PD_HOST_3=127.0.0.1
++ UP_PD_PORT_3=2779
++ UP_PD_PEER_PORT_3=2780
++ DOWN_PD_HOST=127.0.0.1
++ DOWN_PD_PORT=2479
++ DOWN_PD_PEER_PORT=2480
++ TLS_PD_HOST=127.0.0.1
++ TLS_PD_PORT=2579
++ TLS_PD_PEER_PORT=2580
++ UP_TIKV_HOST_1=127.0.0.1
++ UP_TIKV_PORT_1=20160
++ UP_TIKV_STATUS_PORT_1=20181
++ UP_TIKV_HOST_2=127.0.0.1
++ UP_TIKV_PORT_2=20161
++ UP_TIKV_STATUS_PORT_2=20182
++ UP_TIKV_HOST_3=127.0.0.1
++ UP_TIKV_PORT_3=20162
++ UP_TIKV_STATUS_PORT_3=20183
++ DOWN_TIKV_HOST=127.0.0.1
++ DOWN_TIKV_PORT=21160
++ DOWN_TIKV_STATUS_PORT=21180
++ TLS_TIKV_HOST=127.0.0.1
++ TLS_TIKV_PORT=22160
++ TLS_TIKV_STATUS_PORT=22180
+++ cat /tmp/tidb_cdc_test/KAFKA_VERSION
+++ echo 2.4.1
++ KAFKA_VERSION=2.4.1
+ WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo
+ CDC_BINARY=cdc.test
+ SINK_TYPE=kafka
+ CDC_COUNT=3
+ DB_COUNT=4
+ trap stop_tidb_cluster EXIT
+ run_normal_case_and_unavailable_pd conf/changefeed-redo.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
+ mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo
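The synced_status_with_redo trace above shows the preamble shared by the integration tests: run.sh resolves its own directory, sources _utils/test_prepare for the fixed host/port map, recreates its work directory, installs an EXIT trap to tear the cluster down, and then boots a fresh TiDB cluster. Condensed, using only helper names visible in the trace:

# Shared run.sh preamble, condensed from the trace above.
CUR=$(cd "$(dirname "$0")" && pwd)
source "$CUR/../_utils/test_prepare"            # fixed ports for PD/TiKV/TiDB/TLS endpoints
WORK_DIR=/tmp/tidb_cdc_test/synced_status_with_redo
rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
trap stop_tidb_cluster EXIT                     # always stop the cluster on exit
start_tidb_cluster --workdir "$WORK_DIR"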
The 1 times to try to start tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table new_ci_collation_test.t5 exists
check diff failed 1-th time, retry later
table test.finish_mark not exists for 4-th check, retry later
table test.t2 exists
check diff successfully
table mark.finish_mark_2 not exists for 5-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 1-th time...
Verifying downstream PD is started...
Starting Upstream TiKV...
Verifying downstream PD is started...
wait process cdc.test exit for 2-th time...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
check diff failed 4-th time, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_only_block_related_table/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
wait process cdc.test exit for 3-th time...
table test.finish_mark not exists for 5-th check, retry later
table mark.finish_mark_2 not exists for 6-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:26:38 CST 2024] <<<<<< run test case resolve_lock success! >>>>>>
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-initialize-error
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s
check diff failed 2-th time, retry later
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397252390916,
  "checkpoint_time": "2024-05-05 11:26:37.673",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397252390916,
  "checkpoint_time": "2024-05-05 11:26:37.673",
  "error": null
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397252390916,
  "checkpoint_time": "2024-05-05 11:26:37.673",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449545397252390916, '"checkpoint_time":' '"2024-05-05' '11:26:37.673",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449545397252390916, '"checkpoint_time":' '"2024-05-05' '11:26:37.673",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7056.out cli changefeed pause -c changefeed-initialize-error
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
go: downloading github.com/google/s2a-go v0.1.7
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
go: downloading github.com/jmespath/go-jmespath v0.4.0
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 5-th time, retry later
start tidb cluster in /tmp/tidb_cdc_test/ddl_only_block_related_table
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish_mark not exists for 6-th check, retry later
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
table mark.finish_mark_2 not exists for 7-th check, retry later
check diff failed 3-th time, retry later
+ set +x
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error stopped changefeed new redo manager injected error
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-initialize-error
+ expected_state=stopped
+ error_msg=changefeed
+ tls_dir=error
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28341c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:7263, start at 2024-05-05 11:26:37.868652681 +0800 CST m=+5.245211325	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:37.878 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:37.881 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:37.881 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28341c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:7263, start at 2024-05-05 11:26:37.868652681 +0800 CST m=+5.245211325	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:37.878 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:37.881 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:37.881 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2837140014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pfbpq-pmpwv, pid:7320, start at 2024-05-05 11:26:38.045438477 +0800 CST m=+5.372683312	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:38.054 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:38.021 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:38.021 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_source/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_source/tiflash/log/error.log
arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_source/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "stopped",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "stopped",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "stopped",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": {
    "time": "2024-05-05T11:26:30.923047936+08:00",
    "addr": "127.0.0.1:8300",
    "code": "CDC:ErrProcessorUnknown",
    "message": "changefeed new redo manager injected error"
  }
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449545397514272771, '"checkpoint_time":' '"2024-05-05' '11:26:38.672",' '"error":' '{' '"time":' '"2024-05-05T11:26:30.923047936+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}'
++ jq -r .state
+ state=stopped
+ [[ ! stopped == \s\t\o\p\p\e\d ]]
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"stopped",' '"checkpoint_tso":' 449545397514272771, '"checkpoint_time":' '"2024-05-05' '11:26:38.672",' '"error":' '{' '"time":' '"2024-05-05T11:26:30.923047936+08:00",' '"addr":' '"127.0.0.1:8300",' '"code":' '"CDC:ErrProcessorUnknown",' '"message":' '"changefeed' new redo manager injected 'error"' '}' '}'
++ jq -r .error.message
+ message='changefeed new redo manager injected error'
+ [[ ! changefeed new redo manager injected error =~ changefeed ]]
run task successfully
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7142.out cli changefeed resume -c changefeed-initialize-error
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
Verifying downstream PD is started...
table test.finish_mark not exists for 7-th check, retry later
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
table mark.finish_mark_2 exists
table mark.finish_mark_3 not exists for 1-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 4-th time, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8634.out cli tso query --pd=http://127.0.0.1:2379
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 1-th time, retry later
+ set +x
check_changefeed_state http://127.0.0.1:2379 changefeed-initialize-error normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=changefeed-initialize-error
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c changefeed-initialize-error -s
+ info='{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": null
}'
{
  "upstream_id": 7365351471431534517,
  "namespace": "default",
  "id": "changefeed-initialize-error",
  "state": "normal",
  "checkpoint_tso": 449545397514272771,
  "checkpoint_time": "2024-05-05 11:26:38.672",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449545397514272771, '"checkpoint_time":' '"2024-05-05' '11:26:38.672",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365351471431534517, '"namespace":' '"default",' '"id":' '"changefeed-initialize-error",' '"state":' '"normal",' '"checkpoint_tso":' 449545397514272771, '"checkpoint_time":' '"2024-05-05' '11:26:38.672",' '"error":' null '}'
++ jq -r .error.message
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/region_merge/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
+ message=null
+ [[ ! null =~ null ]]
run task successfully
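The trace above is the test suite's changefeed-state check: it queries the changefeed with `cdc cli changefeed query`, then uses jq to compare `.state` and `.error.message` against the expected values. A minimal sketch of that pattern (the helper name and argument order here are illustrative, not the repo's exact function):

# Illustrative sketch of the state check traced above.
check_state() {
    local pd=$1 changefeed=$2 expected=$3
    local info state
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected" ]]; then
        echo "changefeed $changefeed state is $state, expected $expected"
        return 1
    fi
    echo "run task successfully"
}
# e.g. check_state http://127.0.0.1:2379 changefeed-initialize-error normal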
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_error.cli.7224.out cli changefeed remove -c changefeed-initialize-error
table test.finish_mark not exists for 8-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Changefeed remove successfully.
ID: changefeed-initialize-error
CheckpointTs: 449545397514272771
SinkURI: kafka://127.0.0.1:9092/ticdc-sink-retry-test-28424?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
+ set +x
+ tso='449545398709387265
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545398709387265 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
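The tso query above is run through the coverage-instrumented cdc.test binary, so its output carries trailing PASS/coverage lines; the scripts keep only the first whitespace-separated field. A sketch of that extraction, mirroring the trace:

# Sketch: strip the PASS/coverage footer and keep only the TSO.
tso=$(run_cdc_cli tso query --pd=http://127.0.0.1:2379)   # run_cdc_cli is the test helper traced above
start_ts=$(echo $tso | awk -F ' ' '{print $1}')           # unquoted echo flattens the lines; awk keeps the TSO
echo "$start_ts"                                          # e.g. 449545398709387265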
[Sun May  5 11:26:44 CST 2024] <<<<<< START cdc server in multi_source case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.86678669.out server --log-file /tmp/tidb_cdc_test/multi_source/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_source/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
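The loop above waits for the cdc server's HTTP endpoint to come up: it curls /debug/info with basic auth up to 50 times, three seconds apart, until the response body contains "etcd info". A condensed sketch of the same retry logic, using the endpoint, credentials, and limits shown in the trace:

# Condensed sketch of the readiness loop traced above.
for i in $(seq 1 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'etcd info'; then
        break                                    # server is up and registered in etcd
    fi
    if [ "$i" -eq 50 ]; then
        echo 'failed to start cdc server'; exit 1
    fi
    sleep 3
done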
check diff failed 5-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28a1c40006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:31020, start at 2024-05-05 11:26:44.853539747 +0800 CST m=+5.171881074	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:44.860 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:44.849 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:44.849 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 2-th time, retry later
+ set +x
[Sun May  5 11:26:40 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: 1
[Sun May  5 11:26:40 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: 2
[Sun May  5 11:26:40 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: 3
table test.table1 not exists for 1-th check, retry later
table test.table1 not exists for 2-th check, retry later
table test.table1 exists
table test.table2 exists
table test.table3 exists
check diff successfully
table test.table10 not exists for 1-th check, retry later
table test.finish_mark not exists for 9-th check, retry later
table mark.finish_mark_3 not exists for 2-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 1-th time...
table test.table10 not exists for 2-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/region_merge
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
check diff successfully
check diff failed 1-th time, retry later
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:47 CST 2024] <<<<<< run test case changefeed_error success! >>>>>>
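The "wait process cdc.test exit for N-th time..." lines come from a small polling helper that confirms the process is gone before the test case is declared finished. A rough, illustrative sketch of that loop (the real helper lives in the shared test utilities and may differ):

# Illustrative sketch: wait for a process to exit before finishing a test case.
wait_process_exit() {
    local name=$1
    for i in $(seq 1 60); do
        if ! pgrep -x "$name" > /dev/null; then
            echo "process $name already exit"
            return 0
        fi
        echo "wait process $name exit for $i-th time..."
        sleep 1
    done
    echo "process $name still running, giving up"
    return 1
}
# e.g. wait_process_exit cdc.test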
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28b90c0017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:10679, start at 2024-05-05 11:26:46.371080986 +0800 CST m=+5.159780519	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:46.377 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:46.339 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:46.339 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28b90c0017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:10679, start at 2024-05-05 11:26:46.371080986 +0800 CST m=+5.159780519	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:46.377 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:46.339 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:46.339 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28ba840015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:10760, start at 2024-05-05 11:26:46.470314709 +0800 CST m=+5.210349769	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:46.476 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:46.433 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:46.433 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
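The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the rows of TiDB's mysql.tidb bookkeeping table; the startup check simply retries a query against it until the server answers, which is also why ERROR 2003 lines appear while TiDB is still coming up. A minimal way to reproduce the same dump by hand, assuming the upstream server listens on TiDB's default port 4000:

# Hedged example: read the bootstrap and tikv_gc_* bookkeeping rows shown above.
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'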
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
check diff failed 3-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28a1c40006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:31020, start at 2024-05-05 11:26:44.853539747 +0800 CST m=+5.171881074	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:44.860 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:44.849 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:44.849 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28a2600015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:31094, start at 2024-05-05 11:26:44.923446925 +0800 CST m=+5.188382834	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:44.932 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:44.888 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:44.888 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_column_selector_avro/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:47 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03e93818-f28e-4b1c-b493-c088c98f2744
	{"id":"03e93818-f28e-4b1c-b493-c088c98f2744","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879605}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9e4c6d4
	03e93818-f28e-4b1c-b493-c088c98f2744

/tidb/cdc/default/default/upstream/7365351753888436103
	{"id":7365351753888436103,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03e93818-f28e-4b1c-b493-c088c98f2744
	{"id":"03e93818-f28e-4b1c-b493-c088c98f2744","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879605}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9e4c6d4
	03e93818-f28e-4b1c-b493-c088c98f2744

/tidb/cdc/default/default/upstream/7365351753888436103
	{"id":7365351753888436103,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/03e93818-f28e-4b1c-b493-c088c98f2744
	{"id":"03e93818-f28e-4b1c-b493-c088c98f2744","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879605}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9e4c6d4
	03e93818-f28e-4b1c-b493-c088c98f2744

/tidb/cdc/default/default/upstream/7365351753888436103
	{"id":7365351753888436103,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_source.cli.8713.out cli changefeed create --start-ts=449545398709387265 '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-source-test-3613?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
table test.finish_mark not exists for 10-th check, retry later
Create changefeed successfully!
ID: 2ee07fe2-ec53-41ba-863c-19326dd3df30
Info: {"upstream_id":7365351753888436103,"namespace":"default","id":"2ee07fe2-ec53-41ba-863c-19326dd3df30","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-source-test-3613?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:48.250774079+08:00","start_ts":449545398709387265,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545398709387265,"checkpoint_ts":449545398709387265,"checkpoint_time":"2024-05-05 11:26:43.231"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
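The changefeed created above targets Kafka with the open-protocol encoder; the sink URI carries the topic plus the encoder and broker options as query parameters. A commented restatement of the same command, with values copied from the trace (the topic suffix is just the test's random ID):

# Same create call as above, with the sink-uri parameters spelled out.
cdc cli changefeed create \
  --start-ts=449545398709387265 \
  --sink-uri='kafka://127.0.0.1:9092/ticdc-multi-source-test-3613?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
# protocol          -> row event encoding (open-protocol)
# partition-num     -> number of Kafka partitions the sink writes to
# kafka-version     -> broker version hint for the producer
# max-message-bytes -> upper bound on a single encoded message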
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 3-th check, retry later
table test.table10 not exists for 3-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ cd /tmp/tidb_cdc_test/synced_status_with_redo
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12193.out cli tso query --pd=http://127.0.0.1:2379
+ set +x
[Sun May  5 11:26:49 CST 2024] <<<<<< START kafka consumer in multi_source case >>>>>>
go: downloading github.com/pingcap/errors v0.11.5-0.20240318064555-6bd07397691f
go: downloading github.com/pingcap/log v1.1.1-0.20240314023424-862ccc32f18d
go: downloading go.uber.org/zap v1.27.0
go: downloading github.com/pingcap/tidb-tools v0.0.0-20240305021104-9f9bea84490b
go: downloading github.com/BurntSushi/toml v1.3.2
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20240415145106-cd9c676e9ba4
go: downloading gopkg.in/natefinch/lumberjack.v2 v2.2.1
go: downloading go.uber.org/atomic v1.11.0
go: downloading go.uber.org/multierr v1.11.0
go: downloading github.com/pingcap/failpoint v0.0.0-20220801062533-2eaa32854a6c
go: downloading github.com/go-sql-driver/mysql v1.7.1
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20240410110152-5fc42c9be2f5
go: downloading github.com/coreos/go-semver v0.3.1
go: downloading google.golang.org/grpc v1.62.1
table test.finish_mark not exists for 11-th check, retry later
check diff successfully
wait process cdc.test exit for 1-th time...
go: downloading github.com/golang/protobuf v1.5.4
go: downloading golang.org/x/net v0.24.0
go: downloading google.golang.org/protobuf v1.33.0
go: downloading golang.org/x/sys v0.19.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20240401170217-c3f982113cda
go: downloading google.golang.org/genproto v0.0.0-20240401170217-c3f982113cda
go: downloading golang.org/x/text v0.14.0
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28e460001e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:7139, start at 2024-05-05 11:26:49.15733792 +0800 CST m=+5.154406254	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:49.168 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:49.163 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:49.163 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28e460001e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:7139, start at 2024-05-05 11:26:49.15733792 +0800 CST m=+5.154406254	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:49.168 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:49.163 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:49.163 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b28e6a40015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:7217, start at 2024-05-05 11:26:49.299284884 +0800 CST m=+5.247597961	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:49.306 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:49.307 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:49.307 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_only_block_related_table/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[Sun May  5 11:26:50 CST 2024] <<<<<< START cdc server in kafka_column_selector_avro case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector_avro.3246432466.out server --log-file /tmp/tidb_cdc_test/kafka_column_selector_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_column_selector_avro/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table mark.finish_mark_3 not exists for 4-th check, retry later
wait process cdc.test exit for 2-th time...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/move_table/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:26:51 CST 2024] <<<<<< run test case new_ci_collation success! >>>>>>
check diff failed 4-th time, retry later
table test.table10 exists
table test.table20 exists
check diff successfully
+ set +x
+ tso='449545400441634817
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545400441634817 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545400441634817
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test
[Sun May  5 11:26:51 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1223312235.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark exists
go: downloading github.com/cznic/mathutil v0.0.0-20181122101859-297441e03548
go: downloading golang.org/x/exp v0.0.0-20240409090435-93d18d7e34b8
go: downloading golang.org/x/sync v0.7.0
go: downloading github.com/pingcap/kvproto v0.0.0-20240227073058-929ab83f9754
go: downloading github.com/ngaut/pools v0.0.0-20180318154953-b7bc8c42aac7
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20240409022718-714958ccd4d5
go: downloading go.etcd.io/etcd/client/v3 v3.5.12
go: downloading github.com/scalalang2/golang-fifo v0.1.5
go: downloading github.com/tiancaiamao/gp v0.0.0-20221230034425-4025bc8a4d4a
go: downloading github.com/pingcap/tipb v0.0.0-20240318032315-55a7867ddd50
go: downloading github.com/tidwall/btree v1.7.0
go: downloading github.com/tikv/pd/client v0.0.0-20240322051414-fb9e2d561b6e
go: downloading github.com/influxdata/tdigest v0.0.1
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.4.0
go: downloading github.com/prometheus/client_golang v1.19.0
go: downloading github.com/opentracing/opentracing-go v1.2.0
go: downloading github.com/pingcap/sysutil v1.0.1-0.20240311050922-ae81ee01f3a5
go: downloading gopkg.in/yaml.v2 v2.4.0
go: downloading github.com/shirou/gopsutil/v3 v3.24.2
go: downloading github.com/uber/jaeger-client-go v2.30.0+incompatible
go: downloading github.com/gorilla/mux v1.8.0
go: downloading github.com/danjacques/gofslock v0.0.0-20240212154529-d899e02bfe22
go: downloading github.com/coocood/freecache v1.2.1
go: downloading github.com/google/btree v1.1.2
go: downloading github.com/docker/go-units v0.5.0
go: downloading github.com/google/uuid v1.6.0
go: downloading github.com/prometheus/client_model v0.6.1
go: downloading github.com/cockroachdb/errors v1.11.1
go: downloading github.com/stretchr/testify v1.9.0
go: downloading github.com/spf13/pflag v1.0.5
go: downloading github.com/twmb/murmur3 v1.1.6
go: downloading github.com/gogo/protobuf v1.3.2
go: downloading golang.org/x/tools v0.20.0
go: downloading github.com/tikv/pd v1.1.0-beta.0.20240407022249-7179657d129b
go: downloading github.com/jellydator/ttlcache/v3 v3.0.1
go: downloading github.com/golang/snappy v0.0.4
go: downloading github.com/opentracing/basictracer-go v1.1.0
go: downloading github.com/ngaut/sync2 v0.0.0-20141008032647-7a24ed77b2ef
go: downloading github.com/dolthub/swiss v0.2.1
go: downloading github.com/cespare/xxhash/v2 v2.3.0
go: downloading github.com/yangkeao/ldap/v3 v3.4.5-0.20230421065457-369a3bab1117
go: downloading cloud.google.com/go/storage v1.39.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azcore v1.9.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.5.1
go: downloading github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v1.0.0
go: downloading github.com/aliyun/alibaba-cloud-sdk-go v1.61.1581
go: downloading github.com/aws/aws-sdk-go v1.50.0
go: downloading github.com/go-resty/resty/v2 v2.11.0
go: downloading github.com/klauspost/compress v1.17.8
go: downloading github.com/ks3sdklib/aws-sdk-go v1.2.9
go: downloading google.golang.org/api v0.170.0
go: downloading golang.org/x/oauth2 v0.18.0
go: downloading go.etcd.io/etcd/api/v3 v3.5.12
go: downloading github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec
go: downloading go.uber.org/mock v0.4.0
go: downloading github.com/cockroachdb/pebble v1.1.0
go: downloading github.com/jfcg/sorty/v2 v2.1.0
go: downloading golang.org/x/time v0.5.0
go: downloading cloud.google.com/go v0.112.2
go: downloading github.com/carlmjohnson/flagext v0.21.0
go: downloading github.com/dgraph-io/ristretto v0.1.1
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.12
go: downloading github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc
go: downloading github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2
go: downloading gopkg.in/yaml.v3 v3.0.1
go: downloading github.com/dolthub/maphash v0.1.0
go: downloading github.com/beorn7/perks v1.0.1
go: downloading github.com/prometheus/common v0.52.2
go: downloading github.com/prometheus/procfs v0.13.0
go: downloading github.com/pkg/errors v0.9.1
go: downloading github.com/uber/jaeger-lib v2.4.1+incompatible
go: downloading github.com/tklauser/go-sysconf v0.3.12
go: downloading github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358
go: downloading github.com/go-asn1-ber/asn1-ber v1.5.4
go: downloading github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b
go: downloading github.com/cockroachdb/redact v1.1.5
go: downloading github.com/getsentry/sentry-go v0.27.0
go: downloading github.com/Azure/azure-sdk-for-go/sdk/internal v1.5.1
go: downloading github.com/AzureAD/microsoft-authentication-library-for-go v1.2.1
go: downloading golang.org/x/crypto v0.22.0
go: downloading github.com/joho/sqltocsv v0.0.0-20210428211105-a6d6801d59df
go: downloading github.com/jedib0t/go-pretty/v6 v6.2.2
check diff successfully
go: downloading github.com/lestrrat-go/jwx/v2 v2.0.21
go: downloading github.com/dgryski/go-farm v0.0.0-20200201041132-a6ae2369ad13
go: downloading github.com/cloudfoundry/gosigar v1.3.6
go: downloading github.com/otiai10/copy v1.2.0
go: downloading github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2
go: downloading github.com/spkg/bom v1.0.0
go: downloading github.com/xitongsys/parquet-go v1.6.0
go: downloading github.com/jfcg/sixb v1.3.8
go: downloading github.com/google/pprof v0.0.0-20240117000934-35fc243c5815
go: downloading github.com/wangjohn/quickselect v0.0.0-20161129230411-ed8402a42d5f
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20240401170217-c3f982113cda
go: downloading cloud.google.com/go/compute/metadata v0.2.3
go: downloading github.com/coreos/go-systemd/v22 v22.5.0
go: downloading cloud.google.com/go/compute v1.25.1
go: downloading github.com/kr/pretty v0.3.1
go: downloading cloud.google.com/go/iam v1.1.7
go: downloading github.com/googleapis/gax-go/v2 v2.12.3
go: downloading github.com/tklauser/numcpus v0.6.1
go: downloading github.com/cheggaaa/pb/v3 v3.0.8
go: downloading github.com/kylelemons/godebug v1.1.0
go: downloading github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c
go: downloading github.com/robfig/cron/v3 v3.0.1
go: downloading github.com/mattn/go-runewidth v0.0.15
go: downloading github.com/pingcap/badger v1.5.1-0.20230103063557-828f39b09b6d
go: downloading github.com/pingcap/goleveldb v0.0.0-20191226122134-f82aafb29989
go: downloading github.com/robfig/cron v1.2.0
go: downloading github.com/apache/thrift v0.16.0
go: downloading github.com/lestrrat-go/blackmagic v1.0.2
go: downloading github.com/lestrrat-go/httprc v1.0.5
go: downloading github.com/lestrrat-go/iter v1.0.2
go: downloading github.com/lestrrat-go/option v1.0.1
go: downloading github.com/kr/text v0.2.0
go: downloading github.com/rogpeppe/go-internal v1.12.0
go: downloading go.opencensus.io v0.23.1-0.20220331163232-052120675fac
go: downloading go.opentelemetry.io/otel v1.24.0
table mark.finish_mark_3 not exists for 5-th check, retry later
go: downloading go.opentelemetry.io/otel/trace v1.24.0
go: downloading github.com/dustin/go-humanize v1.0.1
go: downloading github.com/golang/glog v1.2.0
go: downloading github.com/golang-jwt/jwt/v5 v5.2.0
go: downloading github.com/VividCortex/ewma v1.2.0
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading github.com/mattn/go-isatty v0.0.20
go: downloading github.com/rivo/uniseg v0.4.7
go: downloading github.com/lestrrat-go/httpcc v1.0.1
go: downloading github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da
go: downloading github.com/ncw/directio v1.0.5
go: downloading github.com/golang-jwt/jwt v3.2.2+incompatible
go: downloading github.com/coocood/rtutil v0.0.0-20190304133409-c84515f646f2
go: downloading github.com/coocood/bbloom v0.0.0-20190830030839-58deb6228d64
go: downloading github.com/klauspost/cpuid v1.3.1
go: downloading github.com/go-logr/logr v1.4.1
go: downloading go.opentelemetry.io/otel/metric v1.24.0
go: downloading github.com/go-logr/stdr v1.2.2
go: downloading github.com/DataDog/zstd v1.5.5
go: downloading github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06
[Sun May  5 11:26:52 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.85918593.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
check diff successfully
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:53 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d9abf2e9-5a13-4cd1-814f-8eab39119284
	{"id":"d9abf2e9-5a13-4cd1-814f-8eab39119284","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879610}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9fc77cd
	d9abf2e9-5a13-4cd1-814f-8eab39119284

/tidb/cdc/default/default/upstream/7365351783654365355
	{"id":7365351783654365355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d9abf2e9-5a13-4cd1-814f-8eab39119284
	{"id":"d9abf2e9-5a13-4cd1-814f-8eab39119284","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879610}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9fc77cd
	d9abf2e9-5a13-4cd1-814f-8eab39119284

/tidb/cdc/default/default/upstream/7365351783654365355
	{"id":7365351783654365355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d9abf2e9-5a13-4cd1-814f-8eab39119284
	{"id":"d9abf2e9-5a13-4cd1-814f-8eab39119284","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879610}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46c9fc77cd
	d9abf2e9-5a13-4cd1-814f-8eab39119284

/tidb/cdc/default/default/upstream/7365351783654365355
	{"id":7365351783654365355,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_column_selector_avro.cli.32515.out cli changefeed create --start-ts=449545400496422914 '--sink-uri=kafka://127.0.0.1:9092/column-selector-avro-test?protocol=avro&enable-tidb-extension=true&avro-enable-watermark=true' -c test --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/conf/changefeed.toml --schema-registry=http://127.0.0.1:8088
Create changefeed successfully!
ID: test
Info: {"upstream_id":7365351783654365355,"namespace":"default","id":"test","sink_uri":"kafka://127.0.0.1:9092/column-selector-avro-test?protocol=avro\u0026enable-tidb-extension=true\u0026avro-enable-watermark=true","create_time":"2024-05-05T11:26:53.634368655+08:00","start_ts":449545400496422914,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"avro","schema_registry":"http://127.0.0.1:8088","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"column_selectors":[{"matcher":["test.*"],"columns":["*","!b"]}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545400496422914,"checkpoint_ts":449545400496422914,"checkpoint_time":"2024-05-05 11:26:50.048"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
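This changefeed uses the Avro protocol with a schema registry and a column selector that drops column b from every test.* table (visible in the column_selectors field of the Info JSON above). A sketch of what the referenced conf/changefeed.toml expresses, written inline as a heredoc; the actual file in the repo may differ:

# Sketch of the column-selector changefeed config referenced above (illustrative, not the repo file verbatim).
cat > changefeed.toml <<'EOF'
[sink]
[[sink.column-selectors]]
matcher = ["test.*"]
columns = ["*", "!b"]
EOF
cdc cli changefeed create -c test \
  --sink-uri='kafka://127.0.0.1:9092/column-selector-avro-test?protocol=avro&enable-tidb-extension=true&avro-enable-watermark=true' \
  --config=changefeed.toml \
  --schema-registry=http://127.0.0.1:8088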
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 6-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:54 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/19597efd-fae9-4816-b434-c8e866f42e38
	{"id":"19597efd-fae9-4816-b434-c8e866f42e38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879611}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca0634d2
	19597efd-fae9-4816-b434-c8e866f42e38

/tidb/cdc/default/default/upstream/7365351782904710483
	{"id":7365351782904710483,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/19597efd-fae9-4816-b434-c8e866f42e38
	{"id":"19597efd-fae9-4816-b434-c8e866f42e38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879611}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca0634d2
	19597efd-fae9-4816-b434-c8e866f42e38

/tidb/cdc/default/default/upstream/7365351782904710483
	{"id":7365351782904710483,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/19597efd-fae9-4816-b434-c8e866f42e38
	{"id":"19597efd-fae9-4816-b434-c8e866f42e38","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879611}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca0634d2
	19597efd-fae9-4816-b434-c8e866f42e38

/tidb/cdc/default/default/upstream/7365351782904710483
	{"id":7365351782904710483,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed-redo.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545400441634817 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.12288.out cli changefeed create --start-ts=449545400441634817 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
check diff failed 1-th time, retry later
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365351782904710483,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:26:54.832484051+08:00","start_ts":449545400441634817,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545400441634817,"checkpoint_ts":449545400441634817,"checkpoint_time":"2024-05-05 11:26:49.839"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
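The changefeed above is created from conf/changefeed-redo.toml, whose contents are not printed in this log; judging from the "consistent" and "synced_status" blocks in the Info JSON, it enables eventual consistency with a file-backed redo log. A speculative reconstruction, with key names spelled in the dashed form used by the etcd dumps elsewhere in this log:

    # Speculative reconstruction of conf/changefeed-redo.toml; the real file
    # is not shown in this log, so treat every key below as inferred.
    cat > conf/changefeed-redo.toml <<'EOF'
    [consistent]
    level = "eventual"
    max-log-size = 64
    flush-interval = 2000
    storage = "file:///tmp/tidb_cdc_test/synced_status/redo"

    [synced-status]
    synced-check-interval = 120
    checkpoint-interval = 20
    EOF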
start tidb cluster in /tmp/tidb_cdc_test/move_table
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
[Sun May  5 11:26:55 CST 2024] <<<<<< START kafka consumer in kafka_column_selector_avro case >>>>>>
consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_column_selector_avro/conf/changefeed.toml
schema registry uri found: http://127.0.0.1:8088
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:26:55 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/99613da4-b61b-409f-9555-42d6ed7a2972
	{"id":"99613da4-b61b-409f-9555-42d6ed7a2972","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879612}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1525c7
	99613da4-b61b-409f-9555-42d6ed7a2972

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/99613da4-b61b-409f-9555-42d6ed7a2972
	{"id":"99613da4-b61b-409f-9555-42d6ed7a2972","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879612}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1525c7
	99613da4-b61b-409f-9555-42d6ed7a2972

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/99613da4-b61b-409f-9555-42d6ed7a2972
	{"id":"99613da4-b61b-409f-9555-42d6ed7a2972","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879612}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1525c7
	99613da4-b61b-409f-9555-42d6ed7a2972

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.cli.8652.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' -c=ddl-only-block-related-table
Starting build checksum checker...
table test.finishmark not exists for 1-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.49.0
go: downloading github.com/google/s2a-go v0.1.7
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0
go: downloading github.com/felixge/httpsnoop v1.0.4
go: downloading github.com/jmespath/go-jmespath v0.4.0
Create changefeed successfully!
ID: ddl-only-block-related-table
Info: {"upstream_id":7365351800315892975,"namespace":"default","id":"ddl-only-block-related-table","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:26:55.981071212+08:00","start_ts":449545402007420932,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545402007420932,"checkpoint_ts":449545402007420932,"checkpoint_time":"2024-05-05 11:26:55.812"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
+ set +x
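The kafka sink URI passed to the changefeed create above packs the protocol and broker limits into query parameters. Pulled apart for readability (the bare `cdc` binary name stands in for the coverage-instrumented cdc.test the harness actually invokes):

    # Same URI as the traced command, assembled explicitly; `cdc` stands in
    # for the instrumented cdc.test binary used by this run.
    TOPIC="ticdc-common-1-test-23151"
    SINK_URI="kafka://127.0.0.1:9092/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
    cdc cli changefeed create --sink-uri="$SINK_URI" -c ddl-only-block-related-table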
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
table mark.finish_mark_3 not exists for 7-th check, retry later

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   221  100   221    0     0   3610      0 --:--:-- --:--:-- --:--:--  3622
+ synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-05 11:26:49.839","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-05 11:26:56.000","info":"Data syncing is finished"}'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:26:49.839","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:26:56.000","info":"Data' syncing is 'finished"}'
++ jq .synced
+ status=true
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:26:49.839","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:26:56.000","info":"Data' syncing is 'finished"}'
++ jq -r .sink_checkpoint_ts
+ sink_checkpoint_ts='2024-05-05 11:26:49.839'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:26:49.839","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:26:56.000","info":"Data' syncing is 'finished"}'
++ jq -r .puller_resolved_ts
+ puller_resolved_ts='1970-01-01 08:00:00.000'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:26:49.839","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:26:56.000","info":"Data' syncing is 'finished"}'
++ jq -r .last_synced_ts
+ last_synced_ts='1970-01-01 08:00:00.000'
+ '[' true '!=' true ']'
+ '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']'
+ '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']'
++ date '+%Y-%m-%d %H:%M:%S'
+ current='2024-05-05 11:26:56'
+ echo 'sink_checkpoint_ts is 2024-05-05' 11:26:49.839
sink_checkpoint_ts is 2024-05-05 11:26:49.839
++ date -d '2024-05-05 11:26:49.839' +%s
+ checkpoint_timestamp=1714879609
++ date -d '2024-05-05 11:26:56' +%s
+ current_timestamp=1714879616
+ '[' 7 -gt 300 ']'
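The block above is the synced-status assertion: fetch /api/v2/changefeeds/test-1/synced, pull fields out with jq, and require that the sink checkpoint lags wall-clock time by no more than 300 seconds. Condensed into one sketch (the function name is made up; the endpoint, fields, and 300-second bound come from the trace itself):

    # Condensed sketch of the assertion traced above.
    check_synced() {
        local resp synced ckpt lag
        resp=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
        synced=$(echo "$resp" | jq -r .synced)
        ckpt=$(echo "$resp" | jq -r .sink_checkpoint_ts)
        lag=$(( $(date +%s) - $(date -d "$ckpt" +%s) ))
        [ "$synced" = "true" ] || { echo "not synced yet: $resp"; return 1; }
        [ "$lag" -le 300 ] || { echo "sink checkpoint lags ${lag}s"; return 1; }
    }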
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
check diff failed 2-th time, retry later
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
table test.finishmark not exists for 2-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ set +x
[Sun May  5 11:26:57 CST 2024] <<<<<< START kafka consumer in ddl_only_block_related_table case >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_only_block_related_table.finish_mark not exists for 1-th check, retry later
table mark.finish_mark_3 not exists for 8-th check, retry later
table test.t1 exists
+ sleep 5
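The recurring "table X not exists for N-th check, retry later" lines come from a polling helper that re-queries the downstream until the table appears. A minimal sketch of that shape (the lookup query and timings are assumptions, not copied from the harness):

    # Minimal sketch of the retry loop behind the "not exists" messages.
    check_table_exists() {
        local table=$1 host=$2 port=$3 retries=${4:-60}
        local i
        for ((i = 1; i <= retries; i++)); do
            if mysql -h"$host" -P"$port" -uroot -N -e \
                "SELECT 1 FROM information_schema.tables
                 WHERE CONCAT(table_schema, '.', table_name) = '$table'" 2>/dev/null | grep -q 1; then
                echo "table $table exists"
                return 0
            fi
            echo "table $table not exists for $i-th check, retry later"
            sleep 2
        done
        return 1
    }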
check diff failed 3-th time, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b295bfc0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:4186, start at 2024-05-05 11:26:56.799797426 +0800 CST m=+5.324378142	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:56.808 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:56.767 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:56.767 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b295bfc0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:4186, start at 2024-05-05 11:26:56.799797426 +0800 CST m=+5.324378142	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:56.808 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:56.767 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:56.767 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b295bf80014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:4266, start at 2024-05-05 11:26:56.788301569 +0800 CST m=+5.267093595	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:28:56.799 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:26:56.766 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:16:56.766 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
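Each VARIABLE_NAME/VARIABLE_VALUE table above is a dump of the mysql.tidb bootstrap table; the "Verifying ... TiDB is started" steps keep retrying a query of this shape until the server answers, and the intervening ERROR 2003 lines are those retries failing. Host and port below are the upstream defaults used in this run:

    # The bootstrap and GC variables printed above, read back directly.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
        "SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;"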
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/region_merge/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/region_merge/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/region_merge/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_only_block_related_table.finish_mark not exists for 2-th check, retry later
table test.finishmark exists
[2024/05/05 11:26:59.635 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.finishmark] [checkSum=0]
[2024/05/05 11:26:59.637 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=718014124]
[2024/05/05 11:26:59.637 +08:00] [INFO] [main.go:107] ["get checksum for the upstream success"] [elapsed=9.279597ms]
[2024/05/05 11:26:59.643 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.finishmark] [checkSum=0]
[2024/05/05 11:26:59.645 +08:00] [INFO] [main.go:186] ["do checkSum success"] [table=test.t1] [checkSum=718014124]
[2024/05/05 11:26:59.645 +08:00] [INFO] [main.go:116] ["get checksum for the downstream success"] [elapsed=8.067151ms]
[2024/05/05 11:26:59.645 +08:00] [INFO] [main.go:95] ["compare checksum passed"]
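The checksum checker computes a per-table checksum on both sides and compares them; its Go internals are not shown here, but the comparison can be approximated by hand with TiDB's ADMIN CHECKSUM TABLE (ports taken from this run; the relied-on column position is an assumption):

    # Hand-rolled approximation of the "compare checksum passed" step above;
    # with -N, column 3 of ADMIN CHECKSUM TABLE is the CRC64-XOR checksum.
    up=$(mysql -h 127.0.0.1 -P 4000 -u root -N -e "ADMIN CHECKSUM TABLE test.t1" | awk '{print $3}')
    down=$(mysql -h 127.0.0.1 -P 3306 -u root -N -e "ADMIN CHECKSUM TABLE test.t1" | awk '{print $3}')
    if [ "$up" = "$down" ]; then echo "compare checksum passed"; else echo "checksum mismatch: $up vs $down"; fi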
wait process cdc.test exit for 1-th time...
table mark.finish_mark_3 not exists for 9-th check, retry later
wait process cdc.test exit for 2-th time...
check diff failed 4-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:27:01 CST 2024] <<<<<< run test case kafka_column_selector_avro success! >>>>>>
table ddl_only_block_related_table.finish_mark not exists for 3-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_sequence/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
[Sun May  5 11:27:01 CST 2024] <<<<<< START cdc server in region_merge case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.region_merge.57155717.out server --log-file /tmp/tidb_cdc_test/region_merge/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/region_merge/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table mark.finish_mark_3 not exists for 10-th check, retry later
check diff failed 5-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   4163      0 --:--:-- --:--:-- --:--:--  4189
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:27:02.240","puller_resolved_ts":"2024-05-05 11:26:56.489","last_synced_ts":"2024-05-05 11:26:56.540","now_ts":"2024-05-05 11:27:03.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:27:02.240","puller_resolved_ts":"2024-05-05' '11:26:56.489","last_synced_ts":"2024-05-05' '11:26:56.540","now_ts":"2024-05-05' '11:27:03.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:27:02.240","puller_resolved_ts":"2024-05-05' '11:26:56.489","last_synced_ts":"2024-05-05' '11:26:56.540","now_ts":"2024-05-05' '11:27:03.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
table test.finish_mark not exists for 1-th check, retry later
table ddl_only_block_related_table.finish_mark not exists for 4-th check, retry later
table mark.finish_mark_3 not exists for 11-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:04 GMT
< Content-Length: 859
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3b4adc9d-d6e2-494e-9a19-b94f1f6d757c
	{"id":"3b4adc9d-d6e2-494e-9a19-b94f1f6d757c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879622}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca2e5598
	3b4adc9d-d6e2-494e-9a19-b94f1f6d757c

/tidb/cdc/default/default/upstream/7365351835132298689
	{"id":7365351835132298689,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3b4adc9d-d6e2-494e-9a19-b94f1f6d757c
	{"id":"3b4adc9d-d6e2-494e-9a19-b94f1f6d757c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879622}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca2e5598
	3b4adc9d-d6e2-494e-9a19-b94f1f6d757c

/tidb/cdc/default/default/upstream/7365351835132298689
	{"id":7365351835132298689,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/3b4adc9d-d6e2-494e-9a19-b94f1f6d757c
	{"id":"3b4adc9d-d6e2-494e-9a19-b94f1f6d757c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879622}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca2e5598
	3b4adc9d-d6e2-494e-9a19-b94f1f6d757c

/tidb/cdc/default/default/upstream/7365351835132298689
	{"id":7365351835132298689,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: 4be87caa-73f7-41d6-aa43-77a87dc008c3
Info: {"upstream_id":7365351835132298689,"namespace":"default","id":"4be87caa-73f7-41d6-aa43-77a87dc008c3","sink_uri":"kafka://127.0.0.1:9092/ticdc-region-merge-test-4901?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:27:05.09726157+08:00","start_ts":449545404394242054,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545404394242054,"checkpoint_ts":449545404394242054,"checkpoint_time":"2024-05-05 11:27:04.917"}
[Sun May  5 11:27:05 CST 2024] <<<<<< START kafka consumer in region_merge case >>>>>>
check diff successfully
table test.finish_mark not exists for 2-th check, retry later
table ddl_only_block_related_table.finish_mark exists
split_and_random_merge scale: 20
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process 8596 exit for 1-th time...
table mark.finish_mark_3 not exists for 12-th check, retry later
wait process 8596 exit for 2-th time...
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8596) - No such process
wait process 8596 exit for 3-th time...
process 8596 already exit
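The kill_cdc_pid trace above kills the capture process and polls until it is gone before the server is restarted with a failpoint. A compact sketch of that wait loop (the real logic lives in the _utils/kill_cdc_pid script referenced in the error line; the retry count here is an assumption):

    # Compact sketch of the wait-for-exit loop traced above.
    kill_and_wait() {
        local pid=$1 i
        kill "$pid" 2>/dev/null
        for ((i = 1; i <= 10; i++)); do
            if ! kill -0 "$pid" 2>/dev/null; then
                echo "process $pid already exit"
                return 0
            fi
            echo "wait process $pid exit for $i-th time..."
            sleep 1
        done
        return 1
    }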
[Sun May  5 11:27:07 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ExecuteNotDone=return(true)'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.87758777.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
start tidb cluster in /tmp/tidb_cdc_test/ddl_sequence
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish_mark not exists for 3-th check, retry later
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
check diff failed 1-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b29eaa00015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:23976, start at 2024-05-05 11:27:05.91747325 +0800 CST m=+6.704184542	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:05.924 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:05.923 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:05.923 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b29eaa00015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:23976, start at 2024-05-05 11:27:05.91747325 +0800 CST m=+6.704184542	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:05.924 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:05.923 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:05.923 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b29d4540013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:24065, start at 2024-05-05 11:27:04.491638464 +0800 CST m=+5.222872538	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:04.500 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:04.469 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:04.469 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/move_table/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/move_table/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/move_table/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/move_table/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table mark.finish_mark_3 not exists for 13-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/batch_add_table/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
table test.finish_mark not exists for 4-th check, retry later
check diff failed 2-th time, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25519.out cli tso query --pd=http://127.0.0.1:2379
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table mark.finish_mark_3 not exists for 14-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:10 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038af710 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405743235077 MinTableBarrierTs:449545405743235077 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405743235078, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd90ba3d-7120-41ef-aaa5-13a01707144b
	{"id":"bd90ba3d-7120-41ef-aaa5-13a01707144b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879627}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca15269a
	bd90ba3d-7120-41ef-aaa5-13a01707144b

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405743235077,"min-table-barrier-ts":449545405743235077,"admin-job-type":0}

/tidb/cdc/default/default/task/position/bd90ba3d-7120-41ef-aaa5-13a01707144b/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038af710 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405743235077 MinTableBarrierTs:449545405743235077 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405743235078, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd90ba3d-7120-41ef-aaa5-13a01707144b
	{"id":"bd90ba3d-7120-41ef-aaa5-13a01707144b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879627}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca15269a
	bd90ba3d-7120-41ef-aaa5-13a01707144b

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405743235077,"min-table-barrier-ts":449545405743235077,"admin-job-type":0}

/tidb/cdc/default/default/task/position/bd90ba3d-7120-41ef-aaa5-13a01707144b/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0038af710 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405743235077 MinTableBarrierTs:449545405743235077 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405743235078, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405743235077, checkpointTs: 449545405743235077, state: Replicating



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd90ba3d-7120-41ef-aaa5-13a01707144b
	{"id":"bd90ba3d-7120-41ef-aaa5-13a01707144b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879627}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca15269a
	bd90ba3d-7120-41ef-aaa5-13a01707144b

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405743235077,"min-table-barrier-ts":449545405743235077,"admin-job-type":0}

/tidb/cdc/default/default/task/position/bd90ba3d-7120-41ef-aaa5-13a01707144b/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
check_ts_not_forward ddl-only-block-related-table
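check_ts_not_forward asserts that, with the ExecuteNotDone failpoint injected at the restart above, the changefeed's checkpoint stays put while the DDL is blocked. An illustrative version of that assertion (the endpoint shape and field name are assumptions, not taken from this log):

    # Illustrative only: read the checkpoint twice a few seconds apart and
    # fail if it advanced while the failpoint keeps the DDL from finishing.
    cf=ddl-only-block-related-table
    ts1=$(curl -s "http://127.0.0.1:8300/api/v2/changefeeds/$cf" | jq -r '.checkpoint_ts')
    sleep 5
    ts2=$(curl -s "http://127.0.0.1:8300/api/v2/changefeeds/$cf" | jq -r '.checkpoint_ts')
    if [ "$ts1" = "$ts2" ]; then echo "checkpoint did not advance, as expected"; else echo "checkpoint moved: $ts1 -> $ts2"; exit 1; fi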
table test.finish_mark exists
start tidb cluster in /tmp/tidb_cdc_test/batch_add_table
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
check diff successfully
check diff failed 3-th time, retry later
+ set +x
+ tso='449545405850976257
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545405850976257 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
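The tso query above prints the current TSO on its first line, followed by the coverage footer of the instrumented binary; the awk call keeps only the first field, which then becomes the changefeed's --start-ts. Equivalent extraction (again with `cdc` standing in for cdc.test):

    # Equivalent start-ts extraction.
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk '{print $1}')
    echo "using --start-ts=$start_ts"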
***************** properties *****************
"mysql.host"="127.0.0.1"
"readallfields"="true"
"threadcount"="10"
"readproportion"="0"
"workload"="core"
"recordcount"="10000"
"dotransactions"="false"
"insertproportion"="0"
"mysql.user"="root"
"updateproportion"="0"
"operationcount"="0"
"mysql.db"="move_table"
"scanproportion"="0"
"mysql.port"="4000"
"requestdistribution"="uniform"
**********************************************
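The properties above drive a go-ycsb load of 10,000 rows into move_table before the changefeed is exercised. The typical invocation for such a properties file looks like the following; the exact wrapper the test uses is not shown in this log, and the file name is illustrative:

    # Hedged sketch: feed the printed properties to go-ycsb's load phase
    # against the upstream TiDB.
    go-ycsb load mysql -P workload.properties \
        -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.db=move_table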
wait process cdc.test exit for 1-th time...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
table mark.finish_mark_3 not exists for 15-th check, retry later
wait process cdc.test exit for 2-th time...
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
*************************** 1. row ***************************
count(distinct region_id): 1
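The "count(distinct region_id): 1" row is the region_merge case confirming that the table's regions were merged back into one after the split_and_random_merge pass logged earlier. The shape of that split-then-count cycle, with a placeholder table name (this log does not show the actual statements):

    # Placeholder table name; statement shapes are TiDB SQL, not copied from
    # the harness. Split into ~20 regions, then count regions after merge.
    mysql -h 127.0.0.1 -P 4000 -u root -D region_merge -e \
        "SPLIT TABLE t1 BETWEEN (0) AND (1000000) REGIONS 20;"
    mysql -h 127.0.0.1 -P 4000 -u root -N -e \
        "SELECT count(distinct region_id) FROM information_schema.tikv_region_status
         WHERE db_name = 'region_merge' AND table_name = 't1';"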
wait process cdc.test exit for 3-th time...
Verifying downstream PD is started...
Run finished, takes 1.18033959s
INSERT - Takes(s): 1.2, Count: 10000, OPS: 8505.7, Avg(us): 1137, Min(us): 783, Max(us): 5802, 95th(us): 2000, 99th(us): 2000
[Sun May  5 11:27:13 CST 2024] <<<<<< START cdc server in move_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2561625618.out server --log-file /tmp/tidb_cdc_test/move_table/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data1 --cluster-id default --addr 127.0.0.1:8300
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
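The trace above is one iteration of the readiness probe used after starting a cdc server: curl the /debug/info endpoint, bail out if it reports 'failed to get info:', and keep retrying until the dump contains 'etcd info' or 50 attempts are exhausted. A minimal sketch of that loop (not the repo's helper verbatim; port and credentials copied from the trace):
for i in $(seq 0 50); do
    # capture both the body and curl's verbose/error output
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1 || true)
    echo "$res" | grep -q 'failed to get info:' && { echo 'cdc server reported an error'; exit 1; }
    echo "$res" | grep -q 'etcd info' && break
    [ "$i" -eq 50 ] && { echo 'cdc server did not become ready'; exit 1; }
    sleep 3
done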
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:27:13 CST 2024] <<<<<< run test case kafka_simple_basic_avro success! >>>>>>
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/debezium/run.sh using Sink-Type: kafka... <<=================
check diff failed 4-th time, retry later
table mark.finish_mark_3 not exists for 16-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
run task failed 1-th time, retry later
check diff failed 5-th time, retry later
Starting Upstream TiDB...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:16 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.cli.25672.out cli changefeed create --start-ts=449545405850976257 '--sink-uri=kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Create changefeed successfully!
ID: 3ceb5c38-c007-4333-80cc-2d665dbf4092
Info: {"upstream_id":7365351868832666162,"namespace":"default","id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink_uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:27:16.860494755+08:00","start_ts":449545405850976257,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545405850976257,"checkpoint_ts":449545405850976257,"checkpoint_time":"2024-05-05 11:27:10.474"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
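The changefeed above is created through the coverage-instrumented cdc.test binary. As a sketch, the same creation with a plain cdc binary is the standard CLI call (start-ts and sink URI copied from the log; the --pd endpoint is the one used throughout this job):
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --start-ts=449545405850976257 \
    --sink-uri='kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'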
table mark.finish_mark_3 not exists for 17-th check, retry later

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100  1415  100   678  100   737    989   1075 --:--:-- --:--:-- --:--:--  1074
HTTP/1.1 201 Created
Date: Sun, 05 May 2024 03:27:16 GMT
Location: http://localhost:8083/connectors/my-connector
Content-Type: application/json
Content-Length: 678
Server: Jetty(9.4.51.v20230217)

{"name":"my-connector","config":{"connector.class":"io.debezium.connector.mysql.MySqlConnector","tasks.max":"1","database.hostname":"127.0.0.1","database.port":"3310","database.user":"debezium","database.password":"dbz","database.server.id":"184054","topic.prefix":"dbserver1","schema.history.internal.kafka.bootstrap.servers":"127.0.0.1:9092","schema.history.internal.kafka.topic":"schemahistory.test","transforms":"x","transforms.x.type":"org.apache.kafka.connect.transforms.RegexRouter","transforms.x.regex":"(.*)","transforms.x.replacement":"output_debezium","binary.handling.mode":"base64","decimal.handling.mode":"double","name":"my-connector"},"tasks":[],"type":"source"}The 1 times to try to start tidb cluster...
split_and_random_merge scale: 40
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check_ts_not_forward ddl-only-block-related-table
check diff successfully
+ set +x
[Sun May  5 11:27:18 CST 2024] <<<<<< START kafka consumer in move_table case >>>>>>
[Sun May  5 11:27:18 CST 2024] <<<<<< START cdc server in move_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2571325719.out server --log-file /tmp/tidb_cdc_test/move_table/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data2 --cluster-id default --addr 127.0.0.1:8301
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8301; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
wait process cdc.test exit for 1-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 18-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 3-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:27:20 CST 2024] <<<<<< run test case changefeed_pause_resume success! >>>>>>
start tidb cluster in /tmp/tidb_cdc_test/debezium
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table mark.finish_mark_3 not exists for 19-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8301
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:21 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001788750 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545406375002388 MinTableBarrierTs:449545408681869320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545406414323992,"min-table-barrier-ts":449545408681869320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001788750 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545406375002388 MinTableBarrierTs:449545408681869320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545406414323992,+ grep -q 'failed to get info:'
"min-table-barrier-ts":449545408681869320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ echo '

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc001788750 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545406375002388 MinTableBarrierTs:449545408681869320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545406414323992,"min-table-barrier-ts":449545408681869320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
[Sun May  5 11:27:21 CST 2024] <<<<<< START cdc server in move_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.move_table.2583125833.out server --log-file /tmp/tidb_cdc_test/move_table/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/move_table/cdc_data3 --cluster-id default --addr 127.0.0.1:8302
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8302; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2ac9380017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:8263, start at 2024-05-05 11:27:20.167219505 +0800 CST m=+7.481382501	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:20.174 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:20.142 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:20.142 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2aa5300016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:8345, start at 2024-05-05 11:27:17.878911852 +0800 CST m=+5.140557312	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:17.885 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:17.886 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:17.886 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
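The VARIABLE_NAME/VARIABLE_VALUE dumps above look like the contents of TiDB's mysql.tidb bookkeeping table (bootstrap flags, GC leader, GC safe point). A sketch of inspecting it directly, assuming the standard mysql client and the upstream port 4000 seen in the workload properties earlier in this log:
mysql -h 127.0.0.1 -P 4000 -u root -e \
    'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb'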
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/ddl_sequence/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_sequence/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table mark.finish_mark_3 not exists for 20-th check, retry later
Verifying downstream PD is started...
[Sun May  5 11:27:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: 10
[Sun May  5 11:27:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: 20
[Sun May  5 11:27:22 CST 2024] <<<<<< START kafka consumer in multi_topics_v2 case >>>>>>
schema registry uri found: finish
table test.finish not exists for 1-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2ae4400014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:4358, start at 2024-05-05 11:27:21.900456971 +0800 CST m=+5.150643131	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:21.908 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:21.872 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:21.872 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2ae4f00008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:4435, start at 2024-05-05 11:27:21.924924724 +0800 CST m=+5.124275588	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:21.931 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:21.916 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:21.916 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/batch_add_table/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/batch_add_table/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9796.out cli tso query --pd=http://127.0.0.1:2379
*************************** 1. row ***************************
count(distinct region_id): 1
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table mark.finish_mark_3 not exists for 21-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8302
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:24 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00325a630 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545409206157322 MinTableBarrierTs:449545409468301320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/89cf3409-b7df-4f98-8b01-6fa4c6e4771f
	{"id":"89cf3409-b7df-4f98-8b01-6fa4c6e4771f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879641}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b87
	89cf3409-b7df-4f98-8b01-6fa4c6e4771f

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545409206157322,"min-table-barrier-ts":449545409468301320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/89cf3409-b7df-4f98-8b01-6fa4c6e4771f/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00325a630 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545409206157322 MinTableBarrierTs:449545409468301320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/89cf3409-b7df-4f98-8b01-6fa4c6e4771f
	{"id":"89cf3409-b7df-4f98-8b01-6fa4c6e4771f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879641}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b87
	89cf3409-b7df-4f98-8b01-6fa4c6e4771f

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545409206157322,"min-table-barrier-ts":449545409468301320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/89cf3409-b7df-4f98-8b01-6fa4c6e4771f/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ echo '

*** processors info ***:

changefeedID: default/3ceb5c38-c007-4333-80cc-2d665dbf4092
{UpstreamID:7365351868832666162 Namespace:default ID:3ceb5c38-c007-4333-80cc-2d665dbf4092 SinkURI:kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:27:16.860494755 +0800 CST StartTs:449545405850976257 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc00325a630 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545407515590661}
{CheckpointTs:449545409206157322 MinTableBarrierTs:449545409468301320 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe
	{"id":"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879633}

/tidb/cdc/default/__cdc_meta__/capture/89cf3409-b7df-4f98-8b01-6fa4c6e4771f
	{"id":"89cf3409-b7df-4f98-8b01-6fa4c6e4771f","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879641}

/tidb/cdc/default/__cdc_meta__/capture/b6f80639-6274-452d-9432-4587e1b94260
	{"id":"b6f80639-6274-452d-9432-4587e1b94260","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879638}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9afd
	6e8801fb-8669-4969-bd1c-adc8ee1fb5fe

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b53
	b6f80639-6274-452d-9432-4587e1b94260

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca4e9b87
	89cf3409-b7df-4f98-8b01-6fa4c6e4771f

/tidb/cdc/default/default/changefeed/info/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"upstream-id":7365351868832666162,"namespace":"default","changefeed-id":"3ceb5c38-c007-4333-80cc-2d665dbf4092","sink-uri":"kafka://127.0.0.1:9092/ticdc-move-table-test-27690?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:27:16.860494755+08:00","start-ts":449545405850976257,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545407515590661}

/tidb/cdc/default/default/changefeed/status/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":449545409206157322,"min-table-barrier-ts":449545409468301320,"admin-job-type":0}

/tidb/cdc/default/default/task/position/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/89cf3409-b7df-4f98-8b01-6fa4c6e4771f/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/b6f80639-6274-452d-9432-4587e1b94260/3ceb5c38-c007-4333-80cc-2d665dbf4092
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351868832666162
	{"id":7365351868832666162,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
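The dump above is TiCDC's etcd keyspace for cluster "default": capture registrations under __cdc_meta__/capture, owner-election keys under __cdc_meta__/owner, the metadata version, and, per namespace (here also named default), changefeed info/status, per-capture task positions and the upstream PD record. A minimal sketch for browsing the same prefix by hand, assuming an etcdctl v3 binary is on PATH and PD's embedded etcd serves clients at http://127.0.0.1:2379 (the pd-endpoints value shown above):

    # list every TiCDC metadata key for the "default" cluster
    ETCDCTL_API=3 etcdctl --endpoints=http://127.0.0.1:2379 \
      get /tidb/cdc/default/ --prefix --keys-only

    # fetch one capture record (ID taken from the dump above)
    ETCDCTL_API=3 etcdctl --endpoints=http://127.0.0.1:2379 \
      get /tidb/cdc/default/__cdc_meta__/capture/6e8801fb-8669-4969-bd1c-adc8ee1fb5fe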
table move_table.usertable exists
go: downloading github.com/benbjohnson/clock v1.3.5
go: downloading github.com/IBM/sarama v1.41.2
go: downloading github.com/xdg/scram v1.0.5
go: downloading go.etcd.io/etcd/server/v3 v3.5.12
go: downloading github.com/aws/aws-sdk-go-v2 v1.19.1
go: downloading github.com/apache/pulsar-client-go v0.11.0
go: downloading github.com/tinylib/msgp v1.1.6
go: downloading github.com/KimMachineGun/automemlimit v0.2.4
go: downloading github.com/go-mysql-org/go-mysql v1.7.1-0.20240314115043-2199dfb0ba98
go: downloading github.com/pierrec/lz4/v4 v4.1.18
go: downloading github.com/cakturk/go-netstat v0.0.0-20200220111822-e5b49efee7a5
go: downloading gorm.io/gorm v1.24.5
go: downloading github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0
go: downloading github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2
go: downloading github.com/gavv/monotime v0.0.0-20190418164738-30dba4353424
go: downloading github.com/edwingeng/deque v0.0.0-20191220032131-8596380dee17
go: downloading github.com/containerd/cgroups v1.0.4
go: downloading github.com/xdg/stringprep v1.0.3
go: downloading github.com/stretchr/objx v0.5.2
go: downloading github.com/gin-gonic/gin v1.9.1
go: downloading github.com/philhofer/fwd v1.1.1
go: downloading github.com/opencontainers/runtime-spec v1.0.2
go: downloading github.com/godbus/dbus/v5 v5.0.4
go: downloading github.com/cilium/ebpf v0.4.0
go: downloading github.com/sirupsen/logrus v1.9.3
go: downloading github.com/jinzhu/now v1.1.5
go: downloading github.com/jinzhu/inflection v1.0.0
go: downloading github.com/gin-contrib/sse v0.1.0
go: downloading github.com/ugorji/go/codec v1.2.11
go: downloading github.com/pelletier/go-toml/v2 v2.0.8
go: downloading github.com/go-playground/validator/v10 v10.14.0
go: downloading github.com/siddontang/go v0.0.0-20180604090527-bdc77568d726
go: downloading github.com/siddontang/go-log v0.0.0-20180807004314-8d05993dda07
go: downloading github.com/godbus/dbus v0.0.0-20190726142602-4481cbc300e2
go: downloading github.com/aws/smithy-go v1.13.5
go: downloading github.com/eapache/go-resiliency v1.4.0
go: downloading github.com/hashicorp/go-multierror v1.1.1
go: downloading github.com/jcmturner/gokrb5/v8 v8.4.4
go: downloading github.com/eapache/go-xerial-snappy v0.0.0-20230731223053-c322873962e3
go: downloading github.com/jcmturner/gofork v1.7.6
go: downloading github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475
go: downloading github.com/eapache/queue v1.1.0
go: downloading github.com/hashicorp/errwrap v1.0.0
go: downloading github.com/bits-and-blooms/bitset v1.4.0
go: downloading github.com/99designs/keyring v1.2.1
go: downloading github.com/linkedin/goavro/v2 v2.11.1
go: downloading golang.org/x/mod v0.17.0
go: downloading github.com/pierrec/lz4 v2.6.1+incompatible
go: downloading github.com/spaolacci/murmur3 v1.1.0
go: downloading github.com/AthenZ/athenz v1.10.39
go: downloading github.com/go-playground/universal-translator v0.18.1
go: downloading github.com/gabriel-vasile/mimetype v1.4.2
go: downloading github.com/leodido/go-urn v1.2.4
go: downloading github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c
go: downloading github.com/mtibben/percent v0.2.1
go: downloading github.com/dvsekhvalnov/jose2go v1.5.0
go: downloading github.com/go-playground/locales v0.14.1
go: downloading github.com/soheilhy/cmux v0.1.5
go: downloading github.com/tmc/grpc-websocket-proxy v0.0.0-20220101234140-673ab2c3ae75
go: downloading go.etcd.io/etcd/pkg/v3 v3.5.12
go: downloading go.etcd.io/bbolt v1.3.9
go: downloading github.com/grpc-ecosystem/grpc-gateway v1.16.0
go: downloading sigs.k8s.io/yaml v1.4.0
go: downloading go.opentelemetry.io/otel/sdk v1.22.0
go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracegrpc v1.22.0
go: downloading github.com/jonboulle/clockwork v0.4.0
go: downloading go.etcd.io/etcd/raft/v3 v3.5.12
go: downloading github.com/xiang90/probing v0.0.0-20221125231312-a49e3df8f510
go: downloading github.com/golang-jwt/jwt/v4 v4.5.0
go: downloading go.etcd.io/etcd/client/v2 v2.305.12
go: downloading github.com/gorilla/websocket v1.5.1
go: downloading go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.22.0
go: downloading github.com/hashicorp/go-uuid v1.0.3
go: downloading github.com/jcmturner/dnsutils/v2 v2.0.0
go: downloading github.com/jcmturner/rpc/v2 v2.0.3
go: downloading github.com/jcmturner/aescts/v2 v2.0.0
go: downloading go.opentelemetry.io/proto/otlp v1.1.0
go: downloading github.com/cenkalti/backoff/v4 v4.2.1
go: downloading github.com/grpc-ecosystem/grpc-gateway/v2 v2.19.1
table test.finish not exists for 2-th check, retry later
+ set +x
+ tso='449545409315733505
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545409315733505 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:27:25 CST 2024] <<<<<< START cdc server in ddl_sequence case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.98349836.out server --log-file /tmp/tidb_cdc_test/ddl_sequence/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_sequence/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
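The trace above is the harness's readiness probe for the cdc server it just launched: curl the /debug/info endpoint on 127.0.0.1:8300 with the ticdc:ticdc_secret basic-auth pair, treat a response containing "failed to get info:" as an error, and retry up to 50 times at 3-second intervals until the body contains "etcd info". A standalone sketch of the same idea (not the harness's exact code), assuming the same address and credentials:

    # wait until the cdc server's /debug/info endpoint reports etcd info
    for i in $(seq 1 50); do
      res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret 2>&1)
      echo "$res" | grep -q 'failed to get info:' && { echo "cdc server returned an error" >&2; exit 1; }
      echo "$res" | grep -q 'etcd info' && { echo "cdc server is ready"; break; }
      sleep 3
    done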
go: downloading github.com/ardielle/ardielle-go v1.5.2
table mark.finish_mark_3 not exists for 22-th check, retry later
[Sun May  5 11:27:26 CST 2024] <<<<<< START cdc server in batch_add_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.58845886.out server --log-file /tmp/tidb_cdc_test/batch_add_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/batch_add_table/cdc_data --cluster-id default
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
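The ERROR 2003 above only means the probe reached TiDB before its MySQL port was listening; the harness keeps retrying until the bootstrap and GC variables in mysql.tidb, of the kind dumped further down in this log, can be read. A minimal sketch of such a wait loop, assuming a mysql client on PATH and the upstream TiDB on the default 127.0.0.1:4000, which this log does not show:

    # poll until TiDB answers and its bootstrap/GC variables are readable
    until mysql -h 127.0.0.1 -P 4000 -u root -e \
        'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb' >/dev/null 2>&1; do
      echo 'TiDB not ready yet, retrying...'
      sleep 1
    done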
table test.finish not exists for 3-th check, retry later
split_and_random_merge scale: 80
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:28 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/18b75dec-8434-4064-9000-1c1e5b0a5123
	{"id":"18b75dec-8434-4064-9000-1c1e5b0a5123","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879645}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca7d5cd2
	18b75dec-8434-4064-9000-1c1e5b0a5123

/tidb/cdc/default/default/upstream/7365351927335579319
	{"id":7365351927335579319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/18b75dec-8434-4064-9000-1c1e5b0a5123
	{"id":"18b75dec-8434-4064-9000-1c1e5b0a5123","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879645}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca7d5cd2
	18b75dec-8434-4064-9000-1c1e5b0a5123

/tidb/cdc/default/default/upstream/7365351927335579319
	{"id":7365351927335579319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/18b75dec-8434-4064-9000-1c1e5b0a5123
	{"id":"18b75dec-8434-4064-9000-1c1e5b0a5123","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879645}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca7d5cd2
	18b75dec-8434-4064-9000-1c1e5b0a5123

/tidb/cdc/default/default/upstream/7365351927335579319
	{"id":7365351927335579319,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_sequence.cli.9901.out cli changefeed create --start-ts=449545409315733505 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-8906?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Create changefeed successfully!
ID: 51bf7b66-be05-4c42-88a6-3ed6129fc91e
Info: {"upstream_id":7365351927335579319,"namespace":"default","id":"51bf7b66-be05-4c42-88a6-3ed6129fc91e","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-sequence-test-8906?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:27:28.703671694+08:00","start_ts":449545409315733505,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545409315733505,"checkpoint_ts":449545409315733505,"checkpoint_time":"2024-05-05 11:27:23.691"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
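With the changefeed created above (ID 51bf7b66-be05-4c42-88a6-3ed6129fc91e, an open-protocol Kafka sink with partition-num=4, kafka-version=2.4.1 and max-message-bytes=10485760), the same CLI can confirm that the feed is registered and its checkpoint is advancing. This is a sketch with a plain cdc binary rather than the instrumented cdc.test wrapper used above, and it is not the verification step the harness itself runs:

    cdc cli changefeed list --pd=http://127.0.0.1:2379
    cdc cli changefeed query --pd=http://127.0.0.1:2379 \
      --changefeed-id=51bf7b66-be05-4c42-88a6-3ed6129fc91e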
table test.finish not exists for 4-th check, retry later
table mark.finish_mark_3 not exists for 23-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:29 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/631f8ef5-4eaf-43ed-80b4-46070edd1373
	{"id":"631f8ef5-4eaf-43ed-80b4-46070edd1373","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879646}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca8f0f15
	631f8ef5-4eaf-43ed-80b4-46070edd1373

/tidb/cdc/default/default/upstream/7365351936366194614
	{"id":7365351936366194614,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/631f8ef5-4eaf-43ed-80b4-46070edd1373
	{"id":"631f8ef5-4eaf-43ed-80b4-46070edd1373","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879646}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca8f0f15
	631f8ef5-4eaf-43ed-80b4-46070edd1373

/tidb/cdc/default/default/upstream/7365351936366194614
	{"id":7365351936366194614,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/631f8ef5-4eaf-43ed-80b4-46070edd1373
	{"id":"631f8ef5-4eaf-43ed-80b4-46070edd1373","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879646}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca8f0f15
	631f8ef5-4eaf-43ed-80b4-46070edd1373

/tidb/cdc/default/default/upstream/7365351936366194614
	{"id":7365351936366194614,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.batch_add_table.cli.5945.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-batch-add-table-test-28002?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: da0c3da5-b6cd-4781-92df-83995b380dfa
Info: {"upstream_id":7365351936366194614,"namespace":"default","id":"da0c3da5-b6cd-4781-92df-83995b380dfa","sink_uri":"kafka://127.0.0.1:9092/ticdc-batch-add-table-test-28002?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:27:29.578923458+08:00","start_ts":449545410818080773,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545410818080773,"checkpoint_ts":449545410818080773,"checkpoint_time":"2024-05-05 11:27:29.422"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
[Sun May  5 11:27:30 CST 2024] <<<<<< START kafka consumer in ddl_sequence case >>>>>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 24-th check, retry later
+ set +x
[Sun May  5 11:27:31 CST 2024] <<<<<< START kafka consumer in batch_add_table case >>>>>>
table test.finish not exists for 5-th check, retry later
table batch_add_table.finish_mark not exists for 1-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2b7d2c0020	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:33738, start at 2024-05-05 11:27:31.708092184 +0800 CST m=+5.174939864	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:31.715 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:31.708 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:31.708 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2b7d2c0020	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:33738, start at 2024-05-05 11:27:31.708092184 +0800 CST m=+5.174939864	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:31.715 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:31.708 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:31.708 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2b7ea40015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pq5wq-7v8vd, pid:33817, start at 2024-05-05 11:27:31.791919169 +0800 CST m=+5.207710956	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:31.801 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:31.803 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:31.803 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
table mark.finish_mark_3 not exists for 25-th check, retry later
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/debezium/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/debezium/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/debezium/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/debezium/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/debezium/tiflash-proxy.toml"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 6-th check, retry later
table batch_add_table.finish_mark exists
check diff successfully
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
wait process cdc.test exit for 1-th time...
*************************** 1. row ***************************
count(distinct region_id): 1
table mark.finish_mark_3 not exists for 26-th check, retry later
[Sun May  5 11:27:34 CST 2024] <<<<<< START cdc server in debezium case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.debezium.3525135253.out server --log-file /tmp/tidb_cdc_test/debezium/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/debezium/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
wait process cdc.test exit for 2-th time...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cli_with_auth/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
table test.finish not exists for 7-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:27:35 CST 2024] <<<<<< run test case batch_add_table success! >>>>>>
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_sequence.finish_mark not exists for 1-th check, retry later
table test.finish not exists for 8-th check, retry later
table mark.finish_mark_3 not exists for 27-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:37 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/71d2383f-4d92-4b26-8ee2-dba2816b685b
	{"id":"71d2383f-4d92-4b26-8ee2-dba2816b685b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879655}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cab353ca
	71d2383f-4d92-4b26-8ee2-dba2816b685b

/tidb/cdc/default/default/upstream/7365351976307555285
	{"id":7365351976307555285,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/71d2383f-4d92-4b26-8ee2-dba2816b685b
	{"id":"71d2383f-4d92-4b26-8ee2-dba2816b685b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879655}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cab353ca
	71d2383f-4d92-4b26-8ee2-dba2816b685b

/tidb/cdc/default/default/upstream/7365351976307555285
	{"id":7365351976307555285,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/71d2383f-4d92-4b26-8ee2-dba2816b685b
	{"id":"71d2383f-4d92-4b26-8ee2-dba2816b685b","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879655}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cab353ca
	71d2383f-4d92-4b26-8ee2-dba2816b685b

/tidb/cdc/default/default/upstream/7365351976307555285
	{"id":7365351976307555285,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.debezium.cli.35306.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/output_ticdc?protocol=debezium&kafka-version=2.4.0'
Create changefeed successfully!
ID: 7bf3570c-763c-41d2-bc19-f1f53964eb35
Info: {"upstream_id":7365351976307555285,"namespace":"default","id":"7bf3570c-763c-41d2-bc19-f1f53964eb35","sink_uri":"kafka://127.0.0.1:9092/output_ticdc?protocol=debezium\u0026kafka-version=2.4.0","create_time":"2024-05-05T11:27:38.394515304+08:00","start_ts":449545413134647298,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"debezium","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545413134647298,"checkpoint_ts":449545413134647298,"checkpoint_time":"2024-05-05 11:27:38.259"}
PASS
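The changefeed above publishes Debezium-format envelopes to the output_ticdc topic on the broker at 127.0.0.1:9092. To eyeball a few of those messages one could attach a console consumer; this assumes a Kafka distribution's kafka-console-consumer.sh is available, which is separate from the consumer the harness starts elsewhere in this log:

    kafka-console-consumer.sh --bootstrap-server 127.0.0.1:9092 \
      --topic output_ticdc --from-beginning --max-messages 10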
[2024/05/05 11:27:37.334 +08:00] [INFO] [main.go:61] ["table mover started"]
[2024/05/05 11:27:37.338 +08:00] [INFO] [main.go:166] ["new cluster initialized"]
[2024/05/05 11:27:37.338 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=6e8801fb-8669-4969-bd1c-adc8ee1fb5fe]
[2024/05/05 11:27:37.338 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300]
[2024/05/05 11:27:37.339 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"]
[2024/05/05 11:27:37.493 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=6e8801fb-8669-4969-bd1c-adc8ee1fb5fe] [processorDetail="{\"table_ids\":[106,108]}"]
[2024/05/05 11:27:37.692 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=89cf3409-b7df-4f98-8b01-6fa4c6e4771f] [processorDetail="{\"table_ids\":[]}"]
[2024/05/05 11:27:37.893 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=b6f80639-6274-452d-9432-4587e1b94260] [processorDetail="{\"table_ids\":[]}"]
[2024/05/05 11:27:37.893 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe\":[{\"ID\":106,\"Changefeed\":\"3ceb5c38-c007-4333-80cc-2d665dbf4092\"},{\"ID\":108,\"Changefeed\":\"3ceb5c38-c007-4333-80cc-2d665dbf4092\"}],\"89cf3409-b7df-4f98-8b01-6fa4c6e4771f\":[],\"b6f80639-6274-452d-9432-4587e1b94260\":[]}"]
[2024/05/05 11:27:37.893 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=3ceb5c38-c007-4333-80cc-2d665dbf4092&target-cp-id=b6f80639-6274-452d-9432-4587e1b94260&table-id=106"]
[2024/05/05 11:27:37.942 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=106]
[2024/05/05 11:27:37.942 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=3ceb5c38-c007-4333-80cc-2d665dbf4092&target-cp-id=b6f80639-6274-452d-9432-4587e1b94260&table-id=108"]
[2024/05/05 11:27:37.993 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=108]
[2024/05/05 11:27:37.993 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=6e8801fb-8669-4969-bd1c-adc8ee1fb5fe] [targetCapture=b6f80639-6274-452d-9432-4587e1b94260]
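The mover's DEBUG lines spell out its whole flow: read the owner ID and address from etcd, collect each capture's current table assignment from the processor detail, then POST a form with cf-id, target-cp-id and table-id to the owner's HTTP API for every table to relocate. A hedged curl equivalent for table 106, reusing the owner address and form string logged above; the handler path itself is not printed here, so /capture/owner/move_table is an assumption based on tiflow's owner API rather than something this log confirms:

    curl -s -X POST http://127.0.0.1:8300/capture/owner/move_table \
      -d 'cf-id=3ceb5c38-c007-4333-80cc-2d665dbf4092&target-cp-id=b6f80639-6274-452d-9432-4587e1b94260&table-id=106'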
table move_table.check1 exists
check diff successfully
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table ddl_sequence.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 9-th check, retry later
table mark.finish_mark_3 not exists for 28-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/cli_with_auth
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table region_merge.t1 exists
check diff failed 1-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
go: downloading github.com/pingcap/tidb v1.1.0-beta.0.20231117065153-a4f85c356873
go: downloading github.com/alecthomas/chroma v0.10.0
go: downloading github.com/google/uuid v1.3.1
go: downloading github.com/fatih/color v1.16.0
go: downloading github.com/pingcap/tidb/pkg/parser v0.0.0-20231116213047-1f7c1e02bcd4
go: downloading github.com/google/go-cmp v0.6.0
go: downloading go.uber.org/zap v1.26.0
go: downloading github.com/segmentio/kafka-go v0.4.45
go: downloading github.com/thessem/zap-prettyconsole v0.3.0
go: downloading github.com/mattn/go-colorable v0.1.13
go: downloading golang.org/x/sys v0.14.0
go: downloading github.com/Code-Hex/dd v1.1.0
go: downloading github.com/klauspost/compress v1.17.1
go: downloading github.com/pierrec/lz4/v4 v4.1.15
go: downloading github.com/dlclark/regexp2 v1.4.0
table ddl_sequence.finish_mark not exists for 3-th check, retry later
table mark.finish_mark_3 not exists for 29-th check, retry later
check diff successfully
table test.finish not exists for 10-th check, retry later
wait process cdc.test exit for 1-th time...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2c09980019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:11560, start at 2024-05-05 11:27:40.687768382 +0800 CST m=+5.380971040	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:40.695 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:40.697 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:40.697 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:27:41.839 +08:00] [INFO] [main.go:61] ["table mover started"]
[2024/05/05 11:27:41.841 +08:00] [INFO] [main.go:166] ["new cluster initialized"]
[2024/05/05 11:27:41.841 +08:00] [DEBUG] [main.go:192] ["retrieved owner ID"] [ownerID=6e8801fb-8669-4969-bd1c-adc8ee1fb5fe]
[2024/05/05 11:27:41.842 +08:00] [DEBUG] [main.go:199] ["retrieved owner addr"] [ownerAddr=127.0.0.1:8300]
[2024/05/05 11:27:41.842 +08:00] [DEBUG] [main.go:210] ["retrieved changefeeds"] [changefeedsError="json: unsupported type: map[model.ChangeFeedID]*mvccpb.KeyValue"]
[2024/05/05 11:27:42.043 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=6e8801fb-8669-4969-bd1c-adc8ee1fb5fe] [processorDetail="{\"table_ids\":[]}"]
wait process cdc.test exit for 2-th time...
[2024/05/05 11:27:42.242 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=89cf3409-b7df-4f98-8b01-6fa4c6e4771f] [processorDetail="{\"table_ids\":[110]}"]
table ddl_sequence.finish_mark not exists for 4-th check, retry later
go: downloading github.com/pingcap/errors v0.11.5-0.20221009092201-b66cddb77c32
go: downloading github.com/pingcap/log v1.1.1-0.20230317032135-a0d097d16e22
go: downloading golang.org/x/exp v0.0.0-20231006140011-7918f672742d
go: downloading github.com/pingcap/sysutil v1.0.1-0.20230407040306-fb007c5aff21
go: downloading github.com/golang/protobuf v1.5.3
go: downloading github.com/tikv/client-go/v2 v2.0.8-0.20231114060955-8fc8a528217e
go: downloading github.com/prometheus/client_model v0.5.0
go: downloading github.com/shirou/gopsutil/v3 v3.23.10
go: downloading github.com/grpc-ecosystem/go-grpc-middleware v1.3.0
go: downloading github.com/cockroachdb/errors v1.8.1
go: downloading github.com/prometheus/client_golang v1.17.0
go: downloading google.golang.org/protobuf v1.31.0
go: downloading github.com/pingcap/kvproto v0.0.0-20230925123611-87bebcc0d071
go: downloading google.golang.org/grpc v1.59.0
go: downloading github.com/cespare/xxhash/v2 v2.2.0
go: downloading github.com/prometheus/procfs v0.12.0
go: downloading github.com/prometheus/common v0.45.0
go: downloading github.com/cockroachdb/redact v1.0.8
go: downloading github.com/cockroachdb/logtags v0.0.0-20190617123548-eb05cc24525f
go: downloading github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2
go: downloading github.com/rogpeppe/go-internal v1.11.0
go: downloading github.com/tikv/pd/client v0.0.0-20231114041114-86831ce71865
go: downloading go.etcd.io/etcd/api/v3 v3.5.10
go: downloading golang.org/x/sync v0.4.0
go: downloading go.etcd.io/etcd/client/v3 v3.5.10
go: downloading github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0
go: downloading go.etcd.io/etcd/client/pkg/v3 v3.5.10
go: downloading google.golang.org/genproto/googleapis/api v0.0.0-20231016165738-49dd2c1f3d0b
go: downloading google.golang.org/genproto v0.0.0-20231016165738-49dd2c1f3d0b
go: downloading golang.org/x/net v0.18.0
go: downloading google.golang.org/genproto/googleapis/rpc v0.0.0-20231016165738-49dd2c1f3d0b
[2024/05/05 11:27:42.443 +08:00] [DEBUG] [main.go:229] ["retrieved processor details"] [changefeed=3ceb5c38-c007-4333-80cc-2d665dbf4092] [captureID=b6f80639-6274-452d-9432-4587e1b94260] [processorDetail="{\"table_ids\":[108]}"]
[2024/05/05 11:27:42.443 +08:00] [INFO] [main.go:75] ["task status"] [status="{\"6e8801fb-8669-4969-bd1c-adc8ee1fb5fe\":[],\"89cf3409-b7df-4f98-8b01-6fa4c6e4771f\":[{\"ID\":110,\"Changefeed\":\"3ceb5c38-c007-4333-80cc-2d665dbf4092\"}],\"b6f80639-6274-452d-9432-4587e1b94260\":[{\"ID\":108,\"Changefeed\":\"3ceb5c38-c007-4333-80cc-2d665dbf4092\"}]}"]
[2024/05/05 11:27:42.443 +08:00] [DEBUG] [main.go:288] ["preparing HTTP API call to owner"] [formStr="cf-id=3ceb5c38-c007-4333-80cc-2d665dbf4092&target-cp-id=b6f80639-6274-452d-9432-4587e1b94260&table-id=110"]
[2024/05/05 11:27:42.493 +08:00] [INFO] [main.go:180] ["moved table successful"] [tableID=110]
[2024/05/05 11:27:42.493 +08:00] [INFO] [main.go:114] ["all tables are moved"] [sourceCapture=89cf3409-b7df-4f98-8b01-6fa4c6e4771f] [targetCapture=b6f80639-6274-452d-9432-4587e1b94260]
check diff successfully
table mark.finish_mark_3 not exists for 30-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:27:42 CST 2024] <<<<<< run test case region_merge success! >>>>>>
table move_table.check2 not exists for 1-th check, retry later
table test.finish not exists for 11-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2c09980019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:11560, start at 2024-05-05 11:27:40.687768382 +0800 CST m=+5.380971040	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:40.695 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:40.697 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:40.697 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2c09980014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:11638, start at 2024-05-05 11:27:40.680179833 +0800 CST m=+5.316684947	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:40.688 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:40.646 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:40.646 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table move_table.check2 exists
table ddl_sequence.finish_mark exists
check diff successfully
check diff successfully
wait process cdc.test exit for 1-th time...
table test.finish not exists for 12-th check, retry later
table mark.finish_mark_3 not exists for 31-th check, retry later
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:27:46 CST 2024] <<<<<< run test case ddl_sequence success! >>>>>>
wait process cdc.test exit for 2-th time...
[Sun May  5 11:27:45 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.1303613038.out server --log-file /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
wait process cdc.test exit for 3-th time...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:27:46 CST 2024] <<<<<< run test case move_table success! >>>>>>
table test.finish not exists for 13-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_rocks/run.sh using Sink-Type: kafka... <<=================
The 1st attempt to start the tidb cluster...
table mark.finish_mark_3 not exists for 32-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:27:48 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
table test.finish not exists for 14-th check, retry later
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f1a0aa05-45a3-4350-a44c-c0cd27703d02
	{"id":"f1a0aa05-45a3-4350-a44c-c0cd27703d02","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879666}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cad79bcd
	f1a0aa05-45a3-4350-a44c-c0cd27703d02

/tidb/cdc/default/default/upstream/7365352022769233855
	{"id":7365352022769233855,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f1a0aa05-45a3-4350-a44c-c0cd27703d02
	{"id":"f1a0aa05-45a3-4350-a44c-c0cd27703d02","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879666}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cad79bcd
	f1a0aa05-45a3-4350-a44c-c0cd27703d02

/tidb/cdc/default/default/upstream/7365352022769233855
	{"id":7365352022769233855,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/f1a0aa05-45a3-4350-a44c-c0cd27703d02
	{"id":"f1a0aa05-45a3-4350-a44c-c0cd27703d02","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879666}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cad79bcd
	f1a0aa05-45a3-4350-a44c-c0cd27703d02

/tidb/cdc/default/default/upstream/7365352022769233855
	{"id":7365352022769233855,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
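The trace above is the readiness probe these cases use before driving the CDC server: it polls the /debug/info endpoint with the ticdc basic-auth credentials until the body contains 'etcd info', retrying up to 50 times with a 3-second sleep. A minimal sketch of that loop, assuming the same endpoint, credentials, and retry budget shown in the trace (not the script's exact implementation):

    # Poll the CDC debug endpoint until it reports etcd info, or give up after 50 tries.
    for ((i = 0; i <= 50; i++)); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server returned an error, retrying..."
        elif echo "$res" | grep -q 'etcd info'; then
            echo "cdc server is ready"
            break
        fi
        if ((i == 50)); then
            echo "cdc server failed to become ready in time" >&2
            exit 1
        fi
        sleep 3
    done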
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13095.out cli tso query --pd=http://127.0.0.1:2379
table mark.finish_mark_3 not exists for 33-th check, retry later
+ set +x
+ tso='449545415975763970
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545415975763970 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13135.out cli changefeed create --start-ts=449545415975763970 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple' -c simple-handle-key-only --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Create changefeed successfully!
ID: simple-handle-key-only
Info: {"upstream_id":7365352022769233855,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple","create_time":"2024-05-05T11:27:51.000344198+08:00","start_ts":449545415975763970,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545415975763970,"checkpoint_ts":449545415975763970,"checkpoint_time":"2024-05-05 11:27:49.097"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
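The two cli invocations above are the usual create sequence in these cases: `cdc cli tso query` fetches a start timestamp from PD, and the first field of its output is then passed to `changefeed create` as --start-ts. A rough sketch of the same sequence, reusing the sink URI and config path that appear in this trace (the coverage-instrumented cdc.test wrapper is replaced here by the plain `cdc cli` form also seen later in this log):

    # Fetch a start TSO from PD; the command prints the TSO on its first line,
    # so keep only the first whitespace-separated field of that line.
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')

    # Create the changefeed at that TSO with the simple-protocol Kafka sink used by this case.
    conf=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml
    cdc cli changefeed create \
        --start-ts="$start_ts" \
        --sink-uri="kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple" \
        -c simple-handle-key-only \
        --config="$conf" \
        --pd=http://127.0.0.1:2379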
table mark.finish_mark_3 not exists for 34-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/multi_rocks
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 15-th check, retry later
+ set +x
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 35-th check, retry later
table test.finish not exists for 16-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2cb6740013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:11063, start at 2024-05-05 11:27:51.742395183 +0800 CST m=+5.314919997	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:51.749 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:51.709 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:51.709 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2cb6740013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:11063, start at 2024-05-05 11:27:51.742395183 +0800 CST m=+5.314919997	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:51.749 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:51.709 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:51.709 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2cb7240018	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:11144, start at 2024-05-05 11:27:51.787044704 +0800 CST m=+5.307870982	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:29:51.793 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:27:51.753 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:17:51.753 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/cli_with_auth/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/cli_with_auth/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cli_with_auth/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/common_1/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
table mark.finish_mark_3 not exists for 36-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Debugger for raftstore-v2 is used
Debugger for raftstore-v2 is used
Debugger for raftstore-v2 is used
table test.finish not exists for 17-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.12615.out cli tso query --pd=http://127.0.0.1:2379
start tidb cluster in /tmp/tidb_cdc_test/common_1
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table mark.finish_mark_3 not exists for 37-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 18-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/resourcecontrol/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
+ set +x
+ tso='449545418075799553
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545418075799553 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
Debugger for raftstore-v2 is used
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13189.out cli changefeed pause -c simple-handle-key-only
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/autorandom/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
table mark.finish_mark_3 not exists for 38-th check, retry later
table test.finish not exists for 19-th check, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13228.out cli changefeed update -c simple-handle-key-only '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple&max-message-bytes=700' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/conf/changefeed.toml --no-confirm
[Sun May  5 11:27:59 CST 2024] <<<<<< START cdc server in cli_with_auth case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.1267312675.out server --log-file /tmp/tidb_cdc_test/cli_with_auth/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cli_with_auth/cdc_data --cluster-id default --config /tmp/tidb_cdc_test/cli_with_auth/server.toml
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
Diff of changefeed config:
{Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple To:kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple&max-message-bytes=700}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc001453588}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc001453598}
{Type:update Path:[Config Consistent] From:<nil> To:0xc001364380}
Update changefeed config successfully! 
ID: simple-handle-key-only
Info: {"upstream_id":7365352022769233855,"namespace":"default","id":"simple-handle-key-only","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-835?protocol=simple\u0026max-message-bytes=700","create_time":"2024-05-05T11:27:51.000344198+08:00","start_ts":449545415975763970,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545418138451974,"checkpoint_time":"2024-05-05 11:27:57.347"}
PASS
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table mark.finish_mark_3 not exists for 39-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 20-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only.cli.13268.out cli changefeed resume -c simple-handle-key-only
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
table test.finish_mark not exists for 3-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:02 GMT
< Content-Length: 859
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87639868-dd0f-4d6a-b6c2-a225148b47cd
	{"id":"87639868-dd0f-4d6a-b6c2-a225148b47cd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879680}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb04f246
	87639868-dd0f-4d6a-b6c2-a225148b47cd

/tidb/cdc/default/default/upstream/7365352071413983013
	{"id":7365352071413983013,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87639868-dd0f-4d6a-b6c2-a225148b47cd
	{"id":"87639868-dd0f-4d6a-b6c2-a225148b47cd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879680}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb04f246
	87639868-dd0f-4d6a-b6c2-a225148b47cd

/tidb/cdc/default/default/upstream/7365352071413983013
	{"id":7365352071413983013,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/87639868-dd0f-4d6a-b6c2-a225148b47cd
	{"id":"87639868-dd0f-4d6a-b6c2-a225148b47cd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879680}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb04f246
	87639868-dd0f-4d6a-b6c2-a225148b47cd

/tidb/cdc/default/default/upstream/7365352071413983013
	{"id":7365352071413983013,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2779,http://127.0.0.1:2679,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.12737.out cli changefeed create --start-ts=449545418075799553 '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name
[WARN] --tz is deprecated in changefeed settings.
table mark.finish_mark_3 not exists for 40-th check, retry later
Create changefeed successfully!
ID: custom-changefeed-name
Info: {"upstream_id":7365352071413983013,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:03.291036993+08:00","start_ts":449545418075799553,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545418075799553,"checkpoint_ts":449545418075799553,"checkpoint_time":"2024-05-05 11:27:57.108"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check_changefeed_state http://127.0.0.1:2379 e81b06b3-399a-4de8-b7d6-41e53a3f6c25 finished null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=e81b06b3-399a-4de8-b7d6-41e53a3f6c25
+ expected_state=finished
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c e81b06b3-399a-4de8-b7d6-41e53a3f6c25 -s
+ info='{
  "upstream_id": 7365351657510964239,
  "namespace": "default",
  "id": "e81b06b3-399a-4de8-b7d6-41e53a3f6c25",
  "state": "finished",
  "checkpoint_tso": 449545416790769666,
  "checkpoint_time": "2024-05-05 11:27:52.206",
  "error": null
}'
+ echo '{
  "upstream_id": 7365351657510964239,
  "namespace": "default",
  "id": "e81b06b3-399a-4de8-b7d6-41e53a3f6c25",
  "state": "finished",
  "checkpoint_tso": 449545416790769666,
  "checkpoint_time": "2024-05-05 11:27:52.206",
  "error": null
}'
{
  "upstream_id": 7365351657510964239,
  "namespace": "default",
  "id": "e81b06b3-399a-4de8-b7d6-41e53a3f6c25",
  "state": "finished",
  "checkpoint_tso": 449545416790769666,
  "checkpoint_time": "2024-05-05 11:27:52.206",
  "error": null
}
++ echo '{' '"upstream_id":' 7365351657510964239, '"namespace":' '"default",' '"id":' '"e81b06b3-399a-4de8-b7d6-41e53a3f6c25",' '"state":' '"finished",' '"checkpoint_tso":' 449545416790769666, '"checkpoint_time":' '"2024-05-05' '11:27:52.206",' '"error":' null '}'
++ jq -r .state
+ state=finished
+ [[ ! finished == \f\i\n\i\s\h\e\d ]]
++ echo '{' '"upstream_id":' 7365351657510964239, '"namespace":' '"default",' '"id":' '"e81b06b3-399a-4de8-b7d6-41e53a3f6c25",' '"state":' '"finished",' '"checkpoint_tso":' 449545416790769666, '"checkpoint_time":' '"2024-05-05' '11:27:52.206",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
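check_changefeed_state above amounts to querying the changefeed summary and comparing the state and error fields against the expected values. A condensed sketch of the same check, assuming the `cdc cli changefeed query ... -s` summary output seen in this trace:

    # Query the changefeed summary and verify it reached the expected state with no error.
    expected_state=finished
    info=$(cdc cli changefeed query --pd=http://127.0.0.1:2379 -c e81b06b3-399a-4de8-b7d6-41e53a3f6c25 -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ "$state" != "$expected_state" ]]; then
        echo "unexpected changefeed state: $state" >&2
        exit 1
    fi
    if [[ ! "$message" =~ null ]]; then
        echo "unexpected changefeed error: $message" >&2
        exit 1
    fi
    echo "run task successfully"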
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:27:58 CST 2024] <<<<<< run test case changefeed_finish success! >>>>>>
+ set +x
table test.finish not exists for 21-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
11:28AM INF > Info cdc.mysql=kafka://127.0.0.1:9092/output_debezium cdc.tidb=kafka://127.0.0.1:9092/output_ticdc db.mysql=root@tcp(127.0.0.1:3310)/{db}?allowNativePasswords=true db.tidb=root@tcp(127.0.0.1:4000)/{db}?allowNativePasswords=true
11:28AM INF > Run case=sql/data_types.sql
+ set +x
[Sun May  5 11:28:04 CST 2024] <<<<<< START kafka consumer in cli_with_auth case >>>>>>
table test.simple not exists for 1-th check, retry later
table test.finish_mark not exists for 4-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark_3 not exists for 41-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 22-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/resourcecontrol
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
start tidb cluster in /tmp/tidb_cdc_test/autorandom
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.simple not exists for 2-th check, retry later
table test.finish_mark not exists for 5-th check, retry later
table mark.finish_mark_3 not exists for 42-th check, retry later
table test.finish not exists for 23-th check, retry later
table test.finish_mark exists
check diff successfully
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 1-th time...
table test.simple exists
table test.`simple-dash` exists
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545421051396103,
  "checkpoint_time": "2024-05-05 11:28:08.459",
  "error": null
}'
+ echo '{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545421051396103,
  "checkpoint_time": "2024-05-05 11:28:08.459",
  "error": null
}'
{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545421051396103,
  "checkpoint_time": "2024-05-05 11:28:08.459",
  "error": null
}
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545421051396103, '"checkpoint_time":' '"2024-05-05' '11:28:08.459",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545421051396103, '"checkpoint_time":' '"2024-05-05' '11:28:08.459",' '"error":' null '}'
++ jq -r .error.message
table test.finish_mark not exists for 6-th check, retry later
wait process cdc.test exit for 2-th time...
+ message=null
+ [[ ! null =~ null ]]
changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379
table mark.finish_mark_3 not exists for 43-th check, retry later
changefeed count 1 check pass, pd_addr: http://127.0.0.1:2679
table test.finish not exists for 24-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2dba7c0004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:7100, start at 2024-05-05 11:28:08.354684895 +0800 CST m=+5.110217985	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:08.361 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:08.351 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:08.351 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2dba7c0004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:7100, start at 2024-05-05 11:28:08.354684895 +0800 CST m=+5.110217985	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:08.361 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:08.351 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:08.351 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2dbb30000b	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-nbv84-f5n24, pid:7188, start at 2024-05-05 11:28:08.406513994 +0800 CST m=+5.113359304	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:08.413 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:08.396 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:08.396 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/common_1/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/common_1/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/common_1/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/common_1/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:09 CST 2024] <<<<<< run test case kafka_simple_handle_key_only success! >>>>>>
changefeed count 1 check pass, pd_addr: http://127.0.0.1:2779
changefeed count 1 check pass, pd_addr: http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779
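The repeated "changefeed count 1 check pass" lines confirm that each PD address (and the combined endpoint list) reports exactly one changefeed. The underlying command is not shown in this trace; a plausible sketch of such a count check, assuming `cdc cli changefeed list` returns a JSON array as it does in current TiCDC releases (the helper name and structure below are illustrative, not the script's own code):

    # Count changefeeds reported via a given PD address and compare with the expected value.
    check_changefeed_count() {
        local pd_addr=$1 expected=$2
        local count
        count=$(cdc cli changefeed list --pd="$pd_addr" 2>/dev/null | jq 'length')
        if [[ "$count" -ne "$expected" ]]; then
            echo "changefeed count $count check failed, pd_addr: $pd_addr" >&2
            return 1
        fi
        echo "changefeed count $count check pass, pd_addr: $pd_addr"
    }

    check_changefeed_count http://127.0.0.1:2379 1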
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Error: [CDC:ErrChangefeedUpdateRefused]changefeed update error: can only update changefeed config when it is stopped or failed
update changefeed config should fail when changefeed is running, got Diff of changefeed config:
{Type:update Path:[Config CaseSensitive] From:false To:true}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc0017f5460}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc0017f5468}
{Type:update Path:[Config Consistent] From:<nil> To:0xc00141a310}
{Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true}
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13134.out cli changefeed --changefeed-id custom-changefeed-name pause
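The ErrChangefeedUpdateRefused error above is the expected outcome: `changefeed update` is only accepted when the changefeed is stopped or failed, which is why the script pauses custom-changefeed-name before re-issuing the update (and, as in the kafka_simple_handle_key_only case earlier, resumes it afterwards). A minimal sketch of that pause/update/resume sequence, using only the cli subcommands and flags that appear elsewhere in this log:

    # A running changefeed rejects config updates, so stop it first.
    cdc cli changefeed pause -c custom-changefeed-name --pd=http://127.0.0.1:2379

    # Apply the new configuration without the interactive confirmation prompt.
    cdc cli changefeed update -c custom-changefeed-name \
        --config=/tmp/tidb_cdc_test/cli_with_auth/changefeed.toml \
        --no-confirm --pd=http://127.0.0.1:2379

    # Resume replication once the update has been accepted.
    cdc cli changefeed resume -c custom-changefeed-name --pd=http://127.0.0.1:2379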
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish_mark not exists for 7-th check, retry later
PASS
coverage: 1.9% of statements in github.com/pingcap/tiflow/...
table mark.finish_mark_3 not exists for 44-th check, retry later
table test.finish not exists for 25-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 8-th check, retry later
table mark.finish_mark_3 not exists for 45-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.cli.8656.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 26-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 9-th check, retry later
+ set +x
+ tso='449545422477983745
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545422477983745 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:28:15 CST 2024] <<<<<< START cdc server in common_1 case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.common_1.86958697.out server --log-file /tmp/tidb_cdc_test/common_1/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/common_1/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
start tidb cluster in /tmp/tidb_cdc_test/force_replicate_table
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 27-th check, retry later
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=custom-changefeed-name
+ expected_state=stopped
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545421575684103,
  "checkpoint_time": "2024-05-05 11:28:10.459",
  "error": null
}'
+ echo '{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545421575684103,
  "checkpoint_time": "2024-05-05 11:28:10.459",
  "error": null
}'
{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "stopped",
  "checkpoint_tso": 449545421575684103,
  "checkpoint_time": "2024-05-05 11:28:10.459",
  "error": null
}
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449545421575684103, '"checkpoint_time":' '"2024-05-05' '11:28:10.459",' '"error":' null '}'
++ jq -r .state
+ state=stopped
+ [[ ! stopped == \s\t\o\p\p\e\d ]]
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"stopped",' '"checkpoint_tso":' 449545421575684103, '"checkpoint_time":' '"2024-05-05' '11:28:10.459",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13222.out cli changefeed update --pd=http://127.0.0.1:2379,http://127.0.0.1:2679,http://127.0.0.1:2779 --config=/tmp/tidb_cdc_test/cli_with_auth/changefeed.toml --no-confirm --changefeed-id custom-changefeed-name
Diff of changefeed config:
{Type:update Path:[Config CaseSensitive] From:false To:true}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc0017a55e8}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc0017a5608}
{Type:update Path:[Config Consistent] From:<nil> To:0xc000f1c770}
{Type:update Path:[Config Scheduler EnableTableAcrossNodes] From:false To:true}
table mark.finish_mark_3 not exists for 46-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e26440013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:11142, start at 2024-05-05 11:28:15.283549313 +0800 CST m=+5.047786545	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:15.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:15.249 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:15.249 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e26440013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:11142, start at 2024-05-05 11:28:15.283549313 +0800 CST m=+5.047786545	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:15.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:15.249 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:15.249 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e294c0003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-pvtgm-6sx1h, pid:11222, start at 2024-05-05 11:28:15.445850406 +0800 CST m=+5.162275332	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:15.452 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:15.443 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:15.443 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/resourcecontrol/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/resourcecontrol/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Update changefeed config successfully! 
ID: custom-changefeed-name
Info: {"upstream_id":7365352071413983013,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:03.291036993+08:00","start_ts":449545418075799553,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":true,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":true,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545421575684103,"checkpoint_time":"2024-05-05 11:28:10.459"}
PASS
coverage: 2.8% of statements in github.com/pingcap/tiflow/...
table test.finish_mark not exists for 10-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 28-th check, retry later
+ set +x
[2024/05/05 11:28:09.018 +08:00] [INFO] [main.go:86] ["running ddl test: 0 createDropSchemaDDL"]
[2024/05/05 11:28:09.221 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
[2024/05/05 11:28:09.227 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:28:09.390 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:28:09.392 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:28:09.400 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:28:09.402 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:28:09.556 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
[2024/05/05 11:28:09.567 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:28:10.754 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
[2024/05/05 11:28:10.757 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:28:10.771 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
[2024/05/05 11:28:10.773 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:28:10.922 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
[2024/05/05 11:28:10.948 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
[2024/05/05 11:28:11.099 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:28:11.102 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
[2024/05/05 11:28:11.127 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:28:11.129 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:28:11.276 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[2024/05/05 11:28:12.319 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:28:12.469 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/05/05 11:28:12.473 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:28:12.485 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:28:12.487 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[2024/05/05 11:28:12.639 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
[2024/05/05 11:28:12.658 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
[2024/05/05 11:28:12.823 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
[2024/05/05 11:28:12.828 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:28:12.829 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:28:12.830 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13260.out cli changefeed --changefeed-id custom-changefeed-name resume
table mark.finish_mark_3 exists
table mark.finish_mark not exists for 1-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:18 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/65ca6efa-97f2-411e-89ca-0b43fd8ba52f
	{"id":"65ca6efa-97f2-411e-89ca-0b43fd8ba52f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879695}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb42dbdc
	65ca6efa-97f2-411e-89ca-0b43fd8ba52f

/tidb/cdc/default/default/upstream/7365352143586810118
	{"id":7365352143586810118,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/65ca6efa-97f2-411e-89ca-0b43fd8ba52f
	{"id":"65ca6efa-97f2-411e-89ca-0b43fd8ba52f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879695}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb42dbdc
	65ca6efa-97f2-411e-89ca-0b43fd8ba52f

/tidb/cdc/default/default/upstream/7365352143586810118
	{"id":7365352143586810118,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/65ca6efa-97f2-411e-89ca-0b43fd8ba52f
	{"id":"65ca6efa-97f2-411e-89ca-0b43fd8ba52f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879695}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb42dbdc
	65ca6efa-97f2-411e-89ca-0b43fd8ba52f

/tidb/cdc/default/default/upstream/7365352143586810118
	{"id":7365352143586810118,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
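Note: the trace above is the harness's readiness check for the cdc server: it polls http://127.0.0.1:8300/debug/info with basic auth (ticdc:ticdc_secret) up to 50 times, treats 'failed to get info:' in the response as a failure marker, and takes 'etcd info' in the body as proof the server is serving. A minimal standalone sketch of that loop (endpoint and credentials taken from the trace; the real helper in tests/integration_tests/_utils may differ):

    # sketch: wait until the cdc server answers /debug/info with its etcd metadata
    for i in $(seq 1 50); do
        res=$(curl -sL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret) || true
        if echo "$res" | grep -q 'etcd info'; then
            echo "cdc server is up"
            break
        fi
        if [ "$i" -eq 50 ]; then
            echo "cdc server failed to start in time"
            exit 1
        fi
        sleep 3
    done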
Create changefeed successfully!
ID: 150b11d9-3f54-41e1-8725-5a17e9268b2d
Info: {"upstream_id":7365352143586810118,"namespace":"default","id":"150b11d9-3f54-41e1-8725-5a17e9268b2d","sink_uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-14891?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:18.658318406+08:00","start_ts":449545422477983745,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545422477983745,"checkpoint_ts":449545422477983745,"checkpoint_time":"2024-05-05 11:28:13.901"}
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12621.out cli tso query --pd=http://127.0.0.1:2379
Verifying downstream PD is started...
[Sun May  5 11:28:18 CST 2024] <<<<<< START kafka consumer in common_1 case >>>>>>
table test.finish_mark not exists for 11-th check, retry later
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:19.530 +08:00] [INFO] [main.go:86] ["running ddl test: 1 truncateDDL"]
[2024/05/05 11:28:19.737 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:28:19.745 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 29-th check, retry later
[2024/05/05 11:28:19.921 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:28:19.922 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:28:19.925 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:28:19.927 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:28:20.104 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
[2024/05/05 11:28:20.119 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:28:20.279 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
[2024/05/05 11:28:20.282 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:28:20.289 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
table mark.finish_mark exists
+ set +x
[2024/05/05 11:28:20.291 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:28:20.455 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
[2024/05/05 11:28:20.468 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
+ set +x
+ tso='449545423775072257
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545423775072257 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
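Note: the tso captured above is mixed with the PASS/coverage lines printed by the instrumented cdc.test binary, so the script keeps only the first whitespace-separated field. A condensed sketch of that extraction, assuming a cdc binary and the PD endpoint from the trace:

    # sketch: query a start-ts from PD and keep only the numeric tso
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
    echo "using start-ts ${start_ts}"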
[Sun May  5 11:28:20 CST 2024] <<<<<< START cdc server in resourcecontrol case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.1266312665.out server --log-file /tmp/tidb_cdc_test/resourcecontrol/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/resourcecontrol/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
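Note: each case launches its own cdc server with the flags shown in the trace (the coverage-instrumented cdc.test binary stands in for cdc, hence -test.coverprofile). A minimal sketch of the equivalent plain launch; --pd and --addr are not passed in the trace and appear here only as the defaults this log assumes:

    # sketch: start a cdc server for the resourcecontrol case against the local PD
    cdc server \
        --pd=http://127.0.0.1:2379 \
        --addr=127.0.0.1:8300 \
        --log-file=/tmp/tidb_cdc_test/resourcecontrol/cdc.log \
        --log-level=debug \
        --data-dir=/tmp/tidb_cdc_test/resourcecontrol/cdc_data \
        --cluster-id=default &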
[2024/05/05 11:28:20.630 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:28:20.632 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
[2024/05/05 11:28:20.645 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:28:20.647 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:28:20.810 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
check diff successfully
[2024/05/05 11:28:20.833 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:28:20.984 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/05/05 11:28:20.987 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:28:21.007 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:28:21.008 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e679c0009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:7181, start at 2024-05-05 11:28:19.438938518 +0800 CST m=+21.764097985	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:19.444 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:19.431 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:19.431 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e679c0009	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:7181, start at 2024-05-05 11:28:19.438938518 +0800 CST m=+21.764097985	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:19.444 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:19.431 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:19.431 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2d98800015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-jb9b1-r2rkj, pid:7264, start at 2024-05-05 11:28:06.220317082 +0800 CST m=+8.489802478	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:06.226 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:06.226 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:06.226 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_rocks/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_rocks/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e56880002	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:30979, start at 2024-05-05 11:28:18.340847859 +0800 CST m=+5.258135721	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:18.348 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:18.338 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:18.338 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e56880002	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:30979, start at 2024-05-05 11:28:18.340847859 +0800 CST m=+5.258135721	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:18.348 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:18.338 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:18.338 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2e573c000d	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:31060, start at 2024-05-05 11:28:18.395987856 +0800 CST m=+5.263030319	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:18.402 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:18.383 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:18.383 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
[2024/05/05 11:28:21.162 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
[2024/05/05 11:28:21.177 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
table common_1.v1 not exists for 1-th check, retry later
table test.finish_mark not exists for 12-th check, retry later
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/autorandom/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/autorandom/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process cdc.test exit for 1-th time...
[2024/05/05 11:28:21.348 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
[2024/05/05 11:28:21.355 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:28:21.355 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:28:21.361 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
[2024/05/05 11:28:21.520 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"]
[2024/05/05 11:28:21.524 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"]
Starting Upstream TiDB...
wait process cdc.test exit for 2-th time...
[2024/05/05 11:28:21.683 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"]
[2024/05/05 11:28:21.688 +08:00] [INFO] [main.go:234] ["0 delete success: 600"]
[2024/05/05 11:28:21.691 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"]
[2024/05/05 11:28:21.697 +08:00] [INFO] [main.go:234] ["1 delete success: 600"]
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 30-th check, retry later
[2024/05/05 11:28:21.855 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"]
[2024/05/05 11:28:21.862 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"]
[2024/05/05 11:28:22.021 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"]
[2024/05/05 11:28:22.026 +08:00] [INFO] [main.go:234] ["0 delete success: 700"]
[2024/05/05 11:28:22.037 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"]
[2024/05/05 11:28:22.043 +08:00] [INFO] [main.go:234] ["1 delete success: 700"]
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only/run.sh: line 1: 13301 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
[2024/05/05 11:28:22.188 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"]
[2024/05/05 11:28:22.203 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"]
[2024/05/05 11:28:22.359 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"]
wait process cdc.test exit for 3-th time...
[2024/05/05 11:28:22.364 +08:00] [INFO] [main.go:234] ["0 delete success: 800"]
[2024/05/05 11:28:22.375 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"]
[2024/05/05 11:28:22.381 +08:00] [INFO] [main.go:234] ["1 delete success: 800"]
[2024/05/05 11:28:22.533 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"]
[2024/05/05 11:28:22.555 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"]
[2024/05/05 11:28:22.715 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"]
[2024/05/05 11:28:22.720 +08:00] [INFO] [main.go:234] ["0 delete success: 900"]
[2024/05/05 11:28:22.740 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"]
[2024/05/05 11:28:22.747 +08:00] [INFO] [main.go:234] ["1 delete success: 900"]
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:28:22 CST 2024] <<<<<< run test case default_value success! >>>>>>
[Sun May  5 11:28:22 CST 2024] <<<<<< START cdc server in multi_rocks case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.2014320145.out server --log-file /tmp/tidb_cdc_test/multi_rocks/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_rocks/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:28:22.897 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"]
[2024/05/05 11:28:22.929 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"]
[2024/05/05 11:28:23.076 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
[2024/05/05 11:28:23.082 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
table common_1.v1 not exists for 2-th check, retry later
table test.finish_mark not exists for 13-th check, retry later
[2024/05/05 11:28:23.146 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
[2024/05/05 11:28:23.153 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
[2024/05/05 11:28:23.302 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
[2024/05/05 11:28:23.328 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"]
[Sun May  5 11:28:23 CST 2024] <<<<<< START cdc server in autorandom case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.autorandom.3241332415.out server --log-file /tmp/tidb_cdc_test/autorandom/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/autorandom/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:28:23.485 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
[2024/05/05 11:28:23.490 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
[2024/05/05 11:28:23.511 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"]
[2024/05/05 11:28:23.518 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"]
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545424721149960,
  "checkpoint_time": "2024-05-05 11:28:22.458",
  "error": null
}'
+ echo '{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545424721149960,
  "checkpoint_time": "2024-05-05 11:28:22.458",
  "error": null
}'
{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545424721149960,
  "checkpoint_time": "2024-05-05 11:28:22.458",
  "error": null
}
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545424721149960, '"checkpoint_time":' '"2024-05-05' '11:28:22.458",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545424721149960, '"checkpoint_time":' '"2024-05-05' '11:28:22.458",' '"error":' null '}'
++ jq -r .error.message
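Note: the trace above queries the changefeed in simple mode (-s) and validates it with jq: .state must be normal and .error.message must be null. A condensed sketch of that verification, using the changefeed id and PD endpoint from the trace:

    # sketch: assert a changefeed is healthy
    info=$(cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s)
    state=$(echo "$info" | jq -r '.state')
    message=$(echo "$info" | jq -r '.error.message')
    if [ "$state" != "normal" ]; then
        echo "unexpected changefeed state: $state"; exit 1
    fi
    if [ "$message" != "null" ]; then
        echo "unexpected changefeed error: $message"; exit 1
    fi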
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:23 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6f0d35a3-2112-4334-aad8-830b1cbeb82f
	{"id":"6f0d35a3-2112-4334-aad8-830b1cbeb82f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879700}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6218cd
	6f0d35a3-2112-4334-aad8-830b1cbeb82f

/tidb/cdc/default/default/upstream/7365352174243751995
	{"id":7365352174243751995,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6f0d35a3-2112-4334-aad8-830b1cbeb82f
	{"id":"6f0d35a3-2112-4334-aad8-830b1cbeb82f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879700}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6218cd
	6f0d35a3-2112-4334-aad8-830b1cbeb82f

/tidb/cdc/default/default/upstream/7365352174243751995
	{"id":7365352174243751995,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6f0d35a3-2112-4334-aad8-830b1cbeb82f
	{"id":"6f0d35a3-2112-4334-aad8-830b1cbeb82f","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879700}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6218cd
	6f0d35a3-2112-4334-aad8-830b1cbeb82f

/tidb/cdc/default/default/upstream/7365352174243751995
	{"id":7365352174243751995,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.resourcecontrol.cli.12716.out cli changefeed create --start-ts=449545423775072257 '--sink-uri=kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-4241?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: 9445ce75-205f-4b6d-bf83-c42b7ea7f9f2
Info: {"upstream_id":7365352174243751995,"namespace":"default","id":"9445ce75-205f-4b6d-bf83-c42b7ea7f9f2","sink_uri":"kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-4241?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:23.825999389+08:00","start_ts":449545423775072257,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545423775072257,"checkpoint_ts":449545423775072257,"checkpoint_time":"2024-05-05 11:28:18.849"}
PASS
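Note: the changefeed above targets a Kafka sink with the open-protocol encoder; the whole sink URI has to be quoted because it contains '&'. A minimal sketch of the same create call (topic, broker and start-ts copied from the trace; --pd matches the default this log uses and is shown only for clarity):

    # sketch: create a changefeed replicating into a Kafka topic via open-protocol
    SINK_URI="kafka://127.0.0.1:9092/ticdc-resourcecontrol-test-4241?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"
    cdc cli changefeed create \
        --pd=http://127.0.0.1:2379 \
        --start-ts=449545423775072257 \
        --sink-uri="$SINK_URI"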
table test.finish not exists for 31-th check, retry later
[2024/05/05 11:28:23.663 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"]
[2024/05/05 11:28:23.691 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"]
[2024/05/05 11:28:23.837 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"]
[2024/05/05 11:28:23.845 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"]
[2024/05/05 11:28:23.875 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"]
[2024/05/05 11:28:23.885 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"]
+ message=null
+ [[ ! null =~ null ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13349.out cli changefeed --changefeed-id custom-changefeed-name remove
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[2024/05/05 11:28:24.014 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"]
[2024/05/05 11:28:24.054 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"]
[2024/05/05 11:28:24.192 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"]
[2024/05/05 11:28:24.200 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"]
[2024/05/05 11:28:24.231 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"]
[2024/05/05 11:28:24.240 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"]
[2024/05/05 11:28:24.367 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"]
Changefeed remove successfully.
ID: custom-changefeed-name
CheckpointTs: 449545424983293958
SinkURI: kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760
PASS
[2024/05/05 11:28:24.412 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"]
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
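Note: earlier in this case the same changefeed was resumed (cli changefeed --changefeed-id custom-changefeed-name resume) and here it is removed; the remove output reports the final CheckpointTs and SinkURI. A sketch of the lifecycle commands, assuming the default PD endpoint used throughout this log:

    # sketch: pause / resume / remove a named changefeed
    cdc cli changefeed pause  --changefeed-id=custom-changefeed-name --pd=http://127.0.0.1:2379
    cdc cli changefeed resume --changefeed-id=custom-changefeed-name --pd=http://127.0.0.1:2379
    cdc cli changefeed remove --changefeed-id=custom-changefeed-name --pd=http://127.0.0.1:2379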
table common_1.v1 exists
table common_1.recover_and_insert not exists for 1-th check, retry later
pass check, checkpoint tso not forward after 10s
run task successfully
wait process 8780 exit for 1-th time...
wait process 8780 exit for 2-th time...
wait process 8780 exit for 3-th time...
wait process 8780 exit for 4-th time...
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_utils/kill_cdc_pid: line 19: kill: (8780) - No such process
wait process 8780 exit for 5-th time...
process 8780 already exit
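Note: the block above comes from the kill_cdc_pid helper: it signals the cdc process and then polls until the pid is gone; the 'No such process' message only means the process exited between two checks. A minimal sketch of that wait, with the pid as a parameter:

    # sketch: signal a cdc process and wait (up to ~30s) for it to disappear
    pid=$1
    kill "$pid" 2>/dev/null || true
    for i in $(seq 1 10); do
        if ! kill -0 "$pid" 2>/dev/null; then
            echo "process $pid already exit"
            break
        fi
        echo "wait process $pid exit for $i-th time..."
        sleep 3
    done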
[Sun May  5 11:28:19 CST 2024] <<<<<< START cdc server in ddl_only_block_related_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_only_block_related_table.95389540.out server --log-file /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_only_block_related_table/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:22 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc000f25b00 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405939580939 MinTableBarrierTs:449545424368304139 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9a202748-47f8-47ab-b148-83d4d0a904c6
	{"id":"9a202748-47f8-47ab-b148-83d4d0a904c6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879699}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1528b9
	9a202748-47f8-47ab-b148-83d4d0a904c6

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405939580939,"min-table-barrier-ts":449545424630448142,"admin-job-type":0}

/tidb/cdc/default/default/task/position/9a202748-47f8-47ab-b148-83d4d0a904c6/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc000f25b00 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405939580939 MinTableBarrierTs:449545424368304139 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9a202748-47f8-47ab-b148-83d4d0a904c6
	{"id":"9a202748-47f8-47ab-b148-83d4d0a904c6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879699}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1528b9
	9a202748-47f8-47ab-b148-83d4d0a904c6

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
table test.finish_mark not exists for 14-th check, retry later
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405939580939,"min-table-barrier-ts":449545424630448142,"admin-job-type":0}

/tidb/cdc/default/default/task/position/9a202748-47f8-47ab-b148-83d4d0a904c6/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ echo '

*** owner info ***:



*** processors info ***:

changefeedID: default/ddl-only-block-related-table
{UpstreamID:7365351800315892975 Namespace:default ID:ddl-only-block-related-table SinkURI:kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760 CreateTime:2024-05-05 11:26:55.981071212 +0800 CST StartTs:449545402007420932 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc000f25b00 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545402047004675}
{CheckpointTs:449545405939580939 MinTableBarrierTs:449545424368304139 AdminJobType:noop}
span: {table_id:112,start_key:7480000000000000ff705f720000000000fa,end_key:7480000000000000ff705f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:108,start_key:7480000000000000ff6c5f720000000000fa,end_key:7480000000000000ff6c5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:110,start_key:7480000000000000ff6e5f720000000000fa,end_key:7480000000000000ff6e5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing
span: {table_id:106,start_key:7480000000000000ff6a5f720000000000fa,end_key:7480000000000000ff6a5f730000000000fa}, resolvedTs: 449545405939580939, checkpointTs: 449545405939580939, state: Preparing



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/9a202748-47f8-47ab-b148-83d4d0a904c6
	{"id":"9a202748-47f8-47ab-b148-83d4d0a904c6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879699}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ca1528b9
	9a202748-47f8-47ab-b148-83d4d0a904c6

/tidb/cdc/default/default/changefeed/info/ddl-only-block-related-table
	{"upstream-id":7365351800315892975,"namespace":"default","changefeed-id":"ddl-only-block-related-table","sink-uri":"kafka://127.0.0.1:9092/ticdc-common-1-test-23151?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create-time":"2024-05-05T11:26:55.981071212+08:00","start-ts":449545402007420932,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545402047004675}

/tidb/cdc/default/default/changefeed/status/ddl-only-block-related-table
	{"checkpoint-ts":449545405939580939,"min-table-barrier-ts":449545424630448142,"admin-job-type":0}

/tidb/cdc/default/default/task/position/9a202748-47f8-47ab-b148-83d4d0a904c6/ddl-only-block-related-table
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365351800315892975
	{"id":7365351800315892975,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ break
+ set +x
check diff failed 1-th time, retry later
check diff successfully
check_ts_forward ddl-only-block-related-table
+ set +x
[Sun May  5 11:28:25 CST 2024] <<<<<< START kafka consumer in resourcecontrol case >>>>>>
table test.finish not exists for 32-th check, retry later
+ set +x
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:26 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6360c5c7-1463-4a7a-b8b7-719be7d588a2
	{"id":"6360c5c7-1463-4a7a-b8b7-719be7d588a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb2d17ea
	6360c5c7-1463-4a7a-b8b7-719be7d588a2

/tidb/cdc/default/default/upstream/7365352119765152926
	{"id":7365352119765152926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6360c5c7-1463-4a7a-b8b7-719be7d588a2
	{"id":"6360c5c7-1463-4a7a-b8b7-719be7d588a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb2d17ea
	6360c5c7-1463-4a7a-b8b7-719be7d588a2

/tidb/cdc/default/default/upstream/7365352119765152926
	{"id":7365352119765152926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6360c5c7-1463-4a7a-b8b7-719be7d588a2
	{"id":"6360c5c7-1463-4a7a-b8b7-719be7d588a2","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb2d17ea
	6360c5c7-1463-4a7a-b8b7-719be7d588a2

/tidb/cdc/default/default/upstream/7365352119765152926
	{"id":7365352119765152926,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
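
The trace above (and the earlier identical retries against 127.0.0.1:8300) is the cdc server readiness probe: it polls the capture's /debug/info endpoint up to 51 times, treats "failed to get info:" as not ready, and breaks out once the dump contains the "etcd info" section. A minimal sketch of that loop, assuming a hypothetical helper name wait_for_cdc_ready; the endpoint, credentials, and marker strings are the ones shown in the trace, the rest is a reconstruction and the real script may differ:

# Hypothetical reconstruction of the readiness loop traced above; the real
# helper in tiflow's integration-test scripts may be structured differently.
wait_for_cdc_ready() {
    local get_info_fail_msg='failed to get info:'
    local etcd_info_msg='etcd info'
    local i res
    for ((i = 0; i <= 50; i++)); do
        # Poll the capture's debug endpoint with the test credentials.
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
                   --user ticdc:ticdc_secret)
        # Ready once the dump has an etcd section and no fetch error.
        if ! echo "$res" | grep -q "$get_info_fail_msg" &&
               echo "$res" | grep -q "$etcd_info_msg"; then
            return 0
        fi
        if ((i == 50)); then
            echo "cdc server at 127.0.0.1:8300 never became ready" >&2
            return 1
        fi
        sleep 3
    done
}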
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_rocks.cli.21143.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-multi-rocks-test-13443?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
changefeed is working normally rts: 449545425154998300->449545425417142300 checkpoint: 449545425154998300->449545425417142300
run task successfully
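
check_ts_forward passes once the changefeed's resolved-ts and checkpoint advance between two polls, as in the rts/checkpoint pair printed just above. TiDB TSOs embed a physical timestamp (milliseconds, shifted left 18 bits to make room for the logical counter), so such values can be sanity-checked against wall-clock time; a small sketch, assuming GNU date and 64-bit shell arithmetic:

# Decode a TiDB TSO into wall-clock time: upper bits are physical milliseconds,
# the low 18 bits a logical counter.
tso_to_time() {
    local tso=$1
    local physical_ms=$((tso >> 18))
    date -d "@$((physical_ms / 1000))" '+%Y-%m-%d %H:%M:%S %z'
}

tso_to_time 449545425417142300   # ~2024-05-05 11:28:25 +0800 on this run's host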
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:26 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/fdf53b0b-e3db-4c80-97a0-806c0ef66526
	{"id":"fdf53b0b-e3db-4c80-97a0-806c0ef66526","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6948cc
	fdf53b0b-e3db-4c80-97a0-806c0ef66526

/tidb/cdc/default/default/upstream/7365352182095606375
	{"id":7365352182095606375,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/fdf53b0b-e3db-4c80-97a0-806c0ef66526
	{"id":"fdf53b0b-e3db-4c80-97a0-806c0ef66526","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6948cc
	fdf53b0b-e3db-4c80-97a0-806c0ef66526

/tidb/cdc/default/default/upstream/7365352182095606375
	{"id":7365352182095606375,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/fdf53b0b-e3db-4c80-97a0-806c0ef66526
	{"id":"fdf53b0b-e3db-4c80-97a0-806c0ef66526","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879703}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb6948cc
	fdf53b0b-e3db-4c80-97a0-806c0ef66526

/tidb/cdc/default/default/upstream/7365352182095606375
	{"id":7365352182095606375,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
Create changefeed successfully!
ID: f7c187cc-8d1a-4105-a718-1a3eae066efc
Info: {"upstream_id":7365352182095606375,"namespace":"default","id":"f7c187cc-8d1a-4105-a718-1a3eae066efc","sink_uri":"kafka://127.0.0.1:9092/ticdc-autorandom-test-26871?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:26.695766791+08:00","start_ts":449545425790697476,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545425790697476,"checkpoint_ts":449545425790697476,"checkpoint_time":"2024-05-05 11:28:26.538"}
[Sun May  5 11:28:26 CST 2024] <<<<<< START kafka consumer in autorandom case >>>>>>
Create changefeed successfully!
ID: d63b456c-7fd0-4bf4-bf4a-2a97e48003a5
Info: {"upstream_id":7365352119765152926,"namespace":"default","id":"d63b456c-7fd0-4bf4-bf4a-2a97e48003a5","sink_uri":"kafka://127.0.0.1:9092/ticdc-multi-rocks-test-13443?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:26.484726703+08:00","start_ts":449545425736171525,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545425736171525,"checkpoint_ts":449545425736171525,"checkpoint_time":"2024-05-05 11:28:26.330"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table common_1.recover_and_insert not exists for 2-th check, retry later
table test.finish_mark not exists for 15-th check, retry later
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
table resourcecontrol.finish_mark not exists for 1-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:27 CST 2024] <<<<<< run test case ddl_only_block_related_table success! >>>>>>
+ set +x
[Sun May  5 11:28:27 CST 2024] <<<<<< START kafka consumer in multi_rocks case >>>>>>
table autorandom_test.table_a not exists for 1-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2edb1c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:4320, start at 2024-05-05 11:28:26.867012781 +0800 CST m=+5.168875439	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:26.876 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:26.874 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:26.874 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2edb1c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:4320, start at 2024-05-05 11:28:26.867012781 +0800 CST m=+5.168875439	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:26.876 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:26.874 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:26.874 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2edd50000e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-rdz18-z9rmg, pid:4403, start at 2024-05-05 11:28:26.977425257 +0800 CST m=+5.227340068	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:26.983 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:26.964 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:26.964 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
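
The VARIABLE_NAME / VARIABLE_VALUE / COMMENT dumps above are the contents of TiDB's mysql.tidb system table, which holds the bootstrap flag and the tikv_gc_* leader and safe-point settings; the harness prints it while verifying that each instance has finished bootstrapping. An equivalent query is sketched below; the host/port are this suite's upstream defaults, and whether the scripts use exactly this invocation is an assumption:

# Dump the bootstrap/GC flags shown above from the upstream TiDB (port 4000 here).
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb;'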
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/force_replicate_table/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/force_replicate_table/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 33-th check, retry later
***************** properties *****************
"mysql.user"="root"
"operationcount"="0"
"mysql.host"="127.0.0.1"
"requestdistribution"="uniform"
"mysql.port"="4000"
"mysql.db"="multi_rocks"
"table"="a1"
"insertproportion"="0"
"recordcount"="1000"
"threadcount"="2"
"readallfields"="true"
"updateproportion"="0"
"dotransactions"="false"
"workload"="core"
"scanproportion"="0"
"readproportion"="0"
**********************************************
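
The properties block above is the go-ycsb workload used to seed multi_rocks.a1 (the later blocks for a2 through a5 differ only in the table name), and the "Run finished" / "INSERT - Takes(s)" lines further down are its summary output. A sketch of the equivalent invocation follows; the workload file path is an assumption, while the -p overrides mirror the printed properties:

# Assumed go-ycsb invocation matching the properties dump above.
go-ycsb load mysql -P config/workload \
    -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
    -p mysql.db=multi_rocks -p table=a1 \
    -p workload=core -p recordcount=1000 -p operationcount=0 \
    -p threadcount=2 -p requestdistribution=uniform \
    -p readallfields=true -p dotransactions=false \
    -p readproportion=0 -p updateproportion=0 \
    -p scanproportion=0 -p insertproportion=0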
table common_1.recover_and_insert not exists for 3-th check, retry later
table test.finish_mark not exists for 16-th check, retry later
changefeed count 0 check pass, pd_addr: http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13438.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --tz=Asia/Shanghai -c=custom-changefeed-name
[WARN] --tz is deprecated in changefeed settings.
Create changefeed successfully!
ID: custom-changefeed-name
Info: {"upstream_id":7365352071413983013,"namespace":"default","id":"custom-changefeed-name","sink_uri":"kafka://127.0.0.1:9092/ticdc-cli-test-24081?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:29.556999428+08:00","start_ts":449545426556420098,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545426556420098,"checkpoint_ts":449545426556420098,"checkpoint_time":"2024-05-05 11:28:29.459"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table autorandom_test.table_a exists
check diff failed 1-th time, retry later
table test.finish not exists for 34-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Run finished, takes 2.116336119s
INSERT - Takes(s): 2.1, Count: 1000, OPS: 476.7, Avg(us): 4195, Min(us): 829, Max(us): 1497477, 95th(us): 2000, 99th(us): 3000
table resourcecontrol.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
***************** properties *****************
"workload"="core"
"mysql.port"="4000"
"mysql.db"="multi_rocks"
"insertproportion"="0"
"mysql.host"="127.0.0.1"
"mysql.user"="root"
"threadcount"="2"
"readproportion"="0"
"operationcount"="0"
"scanproportion"="0"
"table"="a2"
"requestdistribution"="uniform"
"readallfields"="true"
"dotransactions"="false"
"updateproportion"="0"
"recordcount"="1000"
**********************************************
[Sun May  5 11:28:30 CST 2024] <<<<<< START cdc server in force_replicate_table case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.force_replicate_table.58765878.out server --log-file /tmp/tidb_cdc_test/force_replicate_table/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/force_replicate_table/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
wait process cdc.test exit for 2-th time...
Run finished, takes 583.824242ms
INSERT - Takes(s): 0.6, Count: 1000, OPS: 1772.8, Avg(us): 1135, Min(us): 825, Max(us): 19646, 95th(us): 2000, 99th(us): 2000
[2024/05/05 11:28:30.431 +08:00] [INFO] [main.go:86] ["running ddl test: 2 addDropColumnDDL"]
[2024/05/05 11:28:30.629 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:28:30.633 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
[2024/05/05 11:28:30.795 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:28:30.797 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:28:30.806 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:28:30.807 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:28:30.976 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:28:30.988 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
[2024/05/05 11:28:31.153 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
[2024/05/05 11:28:31.155 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:28:31.168 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:28:31.169 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
+ set +x
[2024/05/05 11:28:31.328 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
[2024/05/05 11:28:31.355 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
table common_1.recover_and_insert exists
table common_1.finish_mark not exists for 1-th check, retry later
table test.finish_mark not exists for 17-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:31 CST 2024] <<<<<< run test case resourcecontrol success! >>>>>>
[2024/05/05 11:28:31.495 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:28:31.497 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:28:31.530 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
[2024/05/05 11:28:31.531 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:28:31.694 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:28:31.738 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[2024/05/05 11:28:31.878 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:28:31.880 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[2024/05/05 11:28:31.920 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:28:31.921 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
***************** properties *****************
"requestdistribution"="uniform"
"mysql.db"="multi_rocks"
"updateproportion"="0"
"insertproportion"="0"
"recordcount"="1000"
"mysql.host"="127.0.0.1"
"workload"="core"
"mysql.port"="4000"
"threadcount"="2"
"readproportion"="0"
"scanproportion"="0"
"readallfields"="true"
"mysql.user"="root"
"operationcount"="0"
"table"="a3"
"dotransactions"="false"
**********************************************
table test.finish not exists for 35-th check, retry later
[2024/05/05 11:28:32.054 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
[2024/05/05 11:28:32.096 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
[2024/05/05 11:28:32.225 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:28:32.227 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
[2024/05/05 11:28:32.272 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:28:32.272 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
[2024/05/05 11:28:32.396 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"]
[2024/05/05 11:28:32.450 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"]
check diff successfully
wait process cdc.test exit for 1-th time...
Run finished, takes 566.811654ms
INSERT - Takes(s): 0.5, Count: 1000, OPS: 1819.4, Avg(us): 1100, Min(us): 717, Max(us): 17117, 95th(us): 2000, 99th(us): 2000
***************** properties *****************
"table"="a4"
"requestdistribution"="uniform"
"mysql.port"="4000"
"mysql.host"="127.0.0.1"
"workload"="core"
"recordcount"="1000"
"scanproportion"="0"
"operationcount"="0"
"readproportion"="0"
"dotransactions"="false"
"mysql.db"="multi_rocks"
"mysql.user"="root"
"threadcount"="2"
"readallfields"="true"
"insertproportion"="0"
"updateproportion"="0"
**********************************************
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[2024/05/05 11:28:32.559 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"]
[2024/05/05 11:28:32.561 +08:00] [INFO] [main.go:234] ["0 delete success: 600"]
[2024/05/05 11:28:32.624 +08:00] [INFO] [main.go:234] ["1 delete success: 600"]
[2024/05/05 11:28:32.625 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"]
[2024/05/05 11:28:32.736 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"]
[2024/05/05 11:28:32.802 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"]
[2024/05/05 11:28:32.910 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"]
[2024/05/05 11:28:32.912 +08:00] [INFO] [main.go:234] ["0 delete success: 700"]
[2024/05/05 11:28:32.979 +08:00] [INFO] [main.go:234] ["1 delete success: 700"]
[2024/05/05 11:28:32.980 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"]
wait process cdc.test exit for 2-th time...
Run finished, takes 633.700722ms
INSERT - Takes(s): 0.6, Count: 1000, OPS: 1620.5, Avg(us): 1231, Min(us): 785, Max(us): 33088, 95th(us): 2000, 99th(us): 3000
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:33 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
[2024/05/05 11:28:33.076 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"]
[2024/05/05 11:28:33.162 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"]
[2024/05/05 11:28:33.237 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"]
[2024/05/05 11:28:33.239 +08:00] [INFO] [main.go:234] ["0 delete success: 800"]
***************** properties *****************
"readallfields"="true"
"threadcount"="2"
"mysql.host"="127.0.0.1"
"scanproportion"="0"
"table"="a5"
"operationcount"="0"
"requestdistribution"="uniform"
"workload"="core"
"updateproportion"="0"
"mysql.port"="4000"
"dotransactions"="false"
"mysql.db"="multi_rocks"
"readproportion"="0"
"recordcount"="1000"
"insertproportion"="0"
"mysql.user"="root"
**********************************************
table common_1.finish_mark not exists for 2-th check, retry later
table test.finish_mark exists
check diff successfully
[2024/05/05 11:28:33.339 +08:00] [INFO] [main.go:234] ["1 delete success: 800"]
[2024/05/05 11:28:33.340 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"]
[2024/05/05 11:28:33.408 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"]
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:33 CST 2024] <<<<<< run test case autorandom success! >>>>>>
wait process cdc.test exit for 1-th time...
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/5c00f14a-0ac6-4763-b88f-cd66ccb27c22
	{"id":"5c00f14a-0ac6-4763-b88f-cd66ccb27c22","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879710}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb8acfc7
	5c00f14a-0ac6-4763-b88f-cd66ccb27c22

/tidb/cdc/default/default/upstream/7365352215633795226
	{"id":7365352215633795226,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/5c00f14a-0ac6-4763-b88f-cd66ccb27c22
	{"id":"5c00f14a-0ac6-4763-b88f-cd66ccb27c22","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879710}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb8acfc7
	5c00f14a-0ac6-4763-b88f-cd66ccb27c22

/tidb/cdc/default/default/upstream/7365352215633795226
	{"id":7365352215633795226,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/5c00f14a-0ac6-4763-b88f-cd66ccb27c22
	{"id":"5c00f14a-0ac6-4763-b88f-cd66ccb27c22","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879710}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cb8acfc7
	5c00f14a-0ac6-4763-b88f-cd66ccb27c22

/tidb/cdc/default/default/upstream/7365352215633795226
	{"id":7365352215633795226,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[2024/05/05 11:28:33.518 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"]
[2024/05/05 11:28:33.578 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"]
[2024/05/05 11:28:33.580 +08:00] [INFO] [main.go:234] ["0 delete success: 900"]
[2024/05/05 11:28:33.705 +08:00] [INFO] [main.go:234] ["1 delete success: 900"]
[2024/05/05 11:28:33.706 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"]
[2024/05/05 11:28:33.767 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"]
table test.finish not exists for 36-th check, retry later
Run finished, takes 548.087555ms
INSERT - Takes(s): 0.5, Count: 1000, OPS: 1882.1, Avg(us): 1063, Min(us): 755, Max(us): 16720, 95th(us): 2000, 99th(us): 2000
[2024/05/05 11:28:33.886 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"]
[2024/05/05 11:28:33.929 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
[2024/05/05 11:28:33.931 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
wait process cdc.test exit for 2-th time...
[2024/05/05 11:28:34.063 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
[2024/05/05 11:28:34.063 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
[2024/05/05 11:28:34.094 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
[2024/05/05 11:28:34.240 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"]
[2024/05/05 11:28:34.254 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
[2024/05/05 11:28:34.256 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
Create changefeed successfully!
ID: db914afa-8f13-4023-a404-db11d4ee0cf8
Info: {"upstream_id":7365352215633795226,"namespace":"default","id":"db914afa-8f13-4023-a404-db11d4ee0cf8","sink_uri":"kafka://127.0.0.1:9092/ticdc-force_replicate_table-test-32701?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:34.276714525+08:00","start_ts":449545426717638657,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":true,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545426717638657,"checkpoint_ts":449545426717638657,"checkpoint_time":"2024-05-05 11:28:30.074"}
[Sun May  5 11:28:34 CST 2024] <<<<<< START kafka consumer in force_replicate_table case >>>>>>
consumer replica config found: /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/force_replicate_table/conf/changefeed.toml
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=custom-changefeed-name
+ expected_state=normal
+ error_msg=null
+ tls_dir=
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c custom-changefeed-name -s
+ info='{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545427617841154,
  "checkpoint_time": "2024-05-05 11:28:33.508",
  "error": null
}'
+ echo '{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545427617841154,
  "checkpoint_time": "2024-05-05 11:28:33.508",
  "error": null
}'
{
  "upstream_id": 7365352071413983013,
  "namespace": "default",
  "id": "custom-changefeed-name",
  "state": "normal",
  "checkpoint_tso": 449545427617841154,
  "checkpoint_time": "2024-05-05 11:28:33.508",
  "error": null
}
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545427617841154, '"checkpoint_time":' '"2024-05-05' '11:28:33.508",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365352071413983013, '"namespace":' '"default",' '"id":' '"custom-changefeed-name",' '"state":' '"normal",' '"checkpoint_tso":' 449545427617841154, '"checkpoint_time":' '"2024-05-05' '11:28:33.508",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
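
The [[ ... ]] checks above verify that the custom-changefeed-name changefeed reports state "normal" and a null error after creation. A hypothetical condensed form of that check is sketched below; the helper name and exact structure are assumptions, while the cdc cli and jq calls are the ones shown in the trace:

# Hypothetical helper mirroring the state check traced above.
check_changefeed_state() {
    local pd=$1 changefeed_id=$2 expected_state=$3 expected_error=$4
    local info state message
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    message=$(echo "$info" | jq -r .error.message)
    if [[ ! "$state" == "$expected_state" ]]; then
        echo "unexpected changefeed state: $state (want $expected_state)" >&2
        return 1
    fi
    if [[ ! "$message" =~ $expected_error ]]; then
        echo "unexpected error message: $message (want $expected_error)" >&2
        return 1
    fi
    echo "changefeed $changefeed_id is $state"
}

check_changefeed_state http://127.0.0.1:2379 custom-changefeed-name normal null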
table multi_rocks.finish_mark not exists for 1-th check, retry later
[2024/05/05 11:28:34.415 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"]
[2024/05/05 11:28:34.416 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"]
[2024/05/05 11:28:34.422 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"]
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:34.583 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"]
[2024/05/05 11:28:34.584 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"]
[2024/05/05 11:28:34.605 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"]
[2024/05/05 11:28:34.750 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"]
[2024/05/05 11:28:34.793 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"]
[2024/05/05 11:28:34.794 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"]
wait process cdc.test exit for 3-th time...
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13564.out cli changefeed create --start-ts=449545418075799553 '--sink-uri=kafka://127.0.0.1:9093/ticdc-cli-test-ssl-13992?protocol=open-protocol&ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem&cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem&key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem&kafka-version=2.4.1&max-message-bytes=10485760&insecure-skip-verify=true' --tz=Asia/Shanghai
[2024/05/05 11:28:34.918 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"]
[2024/05/05 11:28:34.920 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"]
[2024/05/05 11:28:34.975 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"]
[WARN] --tz is deprecated in changefeed settings.
[2024/05/05 11:28:35.087 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"]
[2024/05/05 11:28:35.147 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"]
[2024/05/05 11:28:35.148 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"]
[2024/05/05 11:28:35.261 +08:00] [INFO] [main.go:220] ["0 insert success: 2800"]
[2024/05/05 11:28:35.262 +08:00] [INFO] [main.go:234] ["0 delete success: 1400"]
table common_1.finish_mark not exists for 3-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:28:35 CST 2024] <<<<<< run test case many_pk_or_uk success! >>>>>>
Create changefeed successfully!
ID: 2e7fba31-ab0a-413a-a320-5767797a5bb5
Info: {"upstream_id":7365352071413983013,"namespace":"default","id":"2e7fba31-ab0a-413a-a320-5767797a5bb5","sink_uri":"kafka://127.0.0.1:9093/ticdc-cli-test-ssl-13992?protocol=open-protocol\u0026ca=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/ca.pem\u0026cert=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client.pem\u0026key=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/_certificates/client-key.pem\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760\u0026insecure-skip-verify=true","create_time":"2024-05-05T11:28:35.470906755+08:00","start_ts":449545418075799553,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545418075799553,"checkpoint_ts":449545418075799553,"checkpoint_time":"2024-05-05 11:27:57.108"}
PASS
[2024/05/05 11:28:35.333 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"]
[2024/05/05 11:28:35.420 +08:00] [INFO] [main.go:220] ["0 insert success: 2900"]
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/simple/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table multi_rocks.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 37-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13606.out cli unsafe delete-service-gc-safepoint
table common_1.finish_mark not exists for 4-th check, retry later
Confirm that you know what this command will do and use it at your own risk [Y/N]
CDC service GC safepoint truncated in PD!
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table test.finish not exists for 38-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13644.out cli unsafe reset --no-confirm --pd=http://127.0.0.1:2379
table multi_rocks.finish_mark not exists for 3-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/simple
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table common_1.finish_mark exists
reset and all metadata truncated in PD!
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
check diff successfully
wait process cdc.test exit for 1-th time...
table test.finish not exists for 39-th check, retry later
wait process cdc.test exit for 2-th time...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/changefeed_auto_stop/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
+ set +x
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:40 CST 2024] <<<<<< run test case common_1 success! >>>>>>
table multi_rocks.finish_mark not exists for 4-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2f9e3c0012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:14507, start at 2024-05-05 11:28:39.333226204 +0800 CST m=+5.130291106	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:39.340 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:39.311 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:39.311 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2f9e3c0012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:14507, start at 2024-05-05 11:28:39.333226204 +0800 CST m=+5.130291106	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:39.340 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:39.311 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:39.311 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b2fa07c0008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:14587, start at 2024-05-05 11:28:39.463341081 +0800 CST m=+5.208907566	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:39.469 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:39.455 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:39.455 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/error.log
arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 40-th check, retry later
[2024/05/05 11:28:41.320 +08:00] [INFO] [main.go:86] ["running ddl test: 3 addDropColumnDDL2"]
[2024/05/05 11:28:41.520 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:28:41.532 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
[2024/05/05 11:28:41.693 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:28:41.695 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:28:41.707 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:28:41.708 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:28:41.859 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:28:41.887 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
[2024/05/05 11:28:42.024 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
[2024/05/05 11:28:42.026 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:28:42.063 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:28:42.063 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
[2024/05/05 11:28:42.191 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
[2024/05/05 11:28:42.239 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
table force_replicate_table.t0 exists
table force_replicate_table.t1 exists
table force_replicate_table.t2 exists
table force_replicate_table.t3 not exists for 1-th check, retry later
table multi_rocks.finish_mark exists
check diff successfully
[2024/05/05 11:28:42.358 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:28:42.360 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:28:42.413 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
[2024/05/05 11:28:42.414 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:28:42.532 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:28:42.591 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[Sun May  5 11:28:42 CST 2024] <<<<<< START cdc server in kafka_simple_handle_key_only_avro case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.1592615928.out server --log-file /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_handle_key_only_avro/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
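The trace above is one failed iteration of the harness's wait-for-CDC loop: it curls the status endpoint, greps the body for 'etcd info', and sleeps before retrying. A simplified standalone sketch of the same loop, with the endpoint, credentials, and retry budget taken from the trace:

  # Poll the CDC status endpoint until it reports etcd info, at most 50 tries
  for i in $(seq 1 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    echo "$res" | grep -q 'etcd info' && break
    sleep 3
  done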
start tidb cluster in /tmp/tidb_cdc_test/changefeed_auto_stop
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
[2024/05/05 11:28:42.700 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:28:42.702 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[2024/05/05 11:28:42.767 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:28:42.768 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/05/05 11:28:42.864 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[2024/05/05 11:28:42.939 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
[2024/05/05 11:28:43.028 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:28:43.030 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
[2024/05/05 11:28:43.114 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:28:43.115 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
wait process cdc.test exit for 1-th time...
table force_replicate_table.t3 exists
table force_replicate_table.t4 not exists for 1-th check, retry later
[2024/05/05 11:28:43.192 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"]
[2024/05/05 11:28:43.289 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"]
[2024/05/05 11:28:43.357 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"]
[2024/05/05 11:28:43.358 +08:00] [INFO] [main.go:234] ["0 delete success: 600"]
wait process cdc.test exit for 2-th time...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:43.464 +08:00] [INFO] [main.go:234] ["1 delete success: 600"]
[2024/05/05 11:28:43.465 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"]
[2024/05/05 11:28:43.512 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"]
[2024/05/05 11:28:43.653 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"]
[2024/05/05 11:28:43.687 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"]
[2024/05/05 11:28:43.689 +08:00] [INFO] [main.go:234] ["0 delete success: 700"]
[2024/05/05 11:28:43.829 +08:00] [INFO] [main.go:234] ["1 delete success: 700"]
[2024/05/05 11:28:43.830 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"]
[2024/05/05 11:28:43.849 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"]
table test.finish not exists for 41-th check, retry later
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13748.out cli unsafe resolve-lock --region=36
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:43 CST 2024] <<<<<< run test case multi_rocks success! >>>>>>
[2024/05/05 11:28:44.007 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"]
[2024/05/05 11:28:44.009 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"]
[2024/05/05 11:28:44.011 +08:00] [INFO] [main.go:234] ["0 delete success: 800"]
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
[2024/05/05 11:28:44.177 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"]
Verifying downstream PD is started...
[2024/05/05 11:28:44.180 +08:00] [INFO] [main.go:234] ["1 delete success: 800"]
[2024/05/05 11:28:44.180 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"]
[2024/05/05 11:28:44.349 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"]
[2024/05/05 11:28:44.350 +08:00] [INFO] [main.go:234] ["0 delete success: 900"]
[2024/05/05 11:28:44.359 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"]
[2024/05/05 11:28:44.520 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"]
[2024/05/05 11:28:44.531 +08:00] [INFO] [main.go:234] ["1 delete success: 900"]
[2024/05/05 11:28:44.532 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"]
[2024/05/05 11:28:44.694 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
[2024/05/05 11:28:44.696 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
[2024/05/05 11:28:44.715 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"]
[2024/05/05 11:28:44.853 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
[2024/05/05 11:28:44.888 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
[2024/05/05 11:28:44.889 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
table force_replicate_table.t4 exists
table force_replicate_table.t5 not exists for 1-th check, retry later
[2024/05/05 11:28:45.027 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
[2024/05/05 11:28:45.029 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
[2024/05/05 11:28:45.079 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"]
[2024/05/05 11:28:45.196 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"]
[2024/05/05 11:28:45.255 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"]
[2024/05/05 11:28:45.256 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"]
[2024/05/05 11:28:45.356 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"]
[2024/05/05 11:28:45.358 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"]
[2024/05/05 11:28:45.427 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"]
[2024/05/05 11:28:45.520 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"]
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli_with_auth.cli.13784.out cli unsafe resolve-lock --region=36 --ts=449545429479325699
[2024/05/05 11:28:45.599 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"]
[2024/05/05 11:28:45.599 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"]
[2024/05/05 11:28:45.685 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"]
[2024/05/05 11:28:45.687 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"]
[2024/05/05 11:28:45.771 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"]
PASS
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:45 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d042d029-7714-4b34-9638-94e2f7363222
	{"id":"d042d029-7714-4b34-9638-94e2f7363222","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879722}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbc1a4c5
	d042d029-7714-4b34-9638-94e2f7363222

/tidb/cdc/default/default/upstream/7365352278495986351
	{"id":7365352278495986351,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d042d029-7714-4b34-9638-94e2f7363222
	{"id":"d042d029-7714-4b34-9638-94e2f7363222","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879722}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbc1a4c5
	d042d029-7714-4b34-9638-94e2f7363222

/tidb/cdc/default/default/upstream/7365352278495986351
	{"id":7365352278495986351,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/d042d029-7714-4b34-9638-94e2f7363222
	{"id":"d042d029-7714-4b34-9638-94e2f7363222","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879722}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbc1a4c5
	d042d029-7714-4b34-9638-94e2f7363222

/tidb/cdc/default/default/upstream/7365352278495986351
	{"id":7365352278495986351,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
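The keys echoed above (capture, meta-version, owner, upstream) are TiCDC metadata stored in PD's embedded etcd. Assuming an etcd v3 client is available on the node, roughly the same listing could be produced outside the test with:

  # List TiCDC metadata keys kept under PD's embedded etcd (v3 API)
  ETCDCTL_API=3 etcdctl --endpoints=http://127.0.0.1:2379 get /tidb/cdc/default --prefix --keys-only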
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.15980.out cli tso query --pd=http://127.0.0.1:2379
[2024/05/05 11:28:45.854 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"]
[2024/05/05 11:28:45.935 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"]
[2024/05/05 11:28:45.936 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"]
[2024/05/05 11:28:46.023 +08:00] [INFO] [main.go:220] ["0 insert success: 2800"]
[2024/05/05 11:28:46.024 +08:00] [INFO] [main.go:234] ["0 delete success: 1400"]
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[2024/05/05 11:28:46.113 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"]
[2024/05/05 11:28:46.193 +08:00] [INFO] [main.go:220] ["0 insert success: 2900"]
[2024/05/05 11:28:46.283 +08:00] [INFO] [main.go:234] ["1 delete success: 1400"]
[2024/05/05 11:28:46.284 +08:00] [INFO] [main.go:220] ["1 insert success: 2800"]
<<< Run all test success >>>
table test.finish not exists for 42-th check, retry later
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
table force_replicate_table.t5 exists
table force_replicate_table.t6 not exists for 1-th check, retry later
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
+ set +x
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   218  100   212  100     6   175k   5084 --:--:-- --:--:-- --:--:--  207k
{
    "error_msg": "[CDC:ErrAPIInvalidParam]invalid log level: json: cannot unmarshal string into Go value of type struct { Level string \"json:\\\"log_level\\\"\" }",
    "error_code": "CDC:ErrAPIInvalidParam"
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/capture_session_done_during_task/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
+ set +x
+ tso='449545430882058244
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545430882058244 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
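The block above peels the start-ts out of the 'cli tso query' output, whose first field is the TSO and whose remaining lines are the coverage-instrumented binary's PASS/coverage chatter. A standalone sketch of the same extraction, using the plain cdc binary instead of cdc.test:

  # Keep only the TSO, i.e. the first field of the first output line
  start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | head -n1 | awk '{print $1}')
  echo "$start_ts"    # e.g. 449545430882058244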
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16022.out cli changefeed create --start-ts=449545430882058244 '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple&encoding-format=avro' -c simple-handle-key-only-avro --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml
[Pipeline] // node
[Pipeline] }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // podTemplate
Create changefeed successfully!
ID: simple-handle-key-only-avro
Info: {"upstream_id":7365352278495986351,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple\u0026encoding-format=avro","create_time":"2024-05-05T11:28:47.870914885+08:00","start_ts":449545430882058244,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545430882058244,"checkpoint_ts":449545430882058244,"checkpoint_time":"2024-05-05 11:28:45.960"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
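Once the create call returns, the changefeed's state and checkpoint can be re-read from the owner. A hedged example, assuming the standard changefeed query subcommand of the cdc CLI (not exercised in this run):

  # Ask the owner for the current state of the newly created changefeed
  cdc cli changefeed query --pd=http://127.0.0.1:2379 --changefeed-id=simple-handle-key-only-avro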
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
table test.finish not exists for 43-th check, retry later
[Pipeline] // stage
[Pipeline] }
+ set +x
table force_replicate_table.t6 exists
check_data_subset force_replicate_table.t0 127.0.0.1 4000 127.0.0.1 3306
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b303b000014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:21461, start at 2024-05-05 11:28:49.384173557 +0800 CST m=+5.898077514	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:49.392 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:49.394 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:49.394 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
run task successfully
check_data_subset force_replicate_table.t1 127.0.0.1 4000 127.0.0.1 3306
table test.finish not exists for 44-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
}
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   182  100   182    0     0   219k      0 --:--:-- --:--:-- --:--:--  177k
{
 "version": "v8.2.0-alpha-53-g0de8dc3e4",
 "git_hash": "0de8dc3e43ec741eba58047155ce7f3dba8eb4f7",
 "id": "9e4229d7-bb56-473b-a990-ac525600bf5a",
 "pid": 12678,
 "is_owner": true
run task successfully
check_data_subset force_replicate_table.t2 127.0.0.1 4000 127.0.0.1 3306
start tidb cluster in /tmp/tidb_cdc_test/capture_session_done_during_task
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
}
wait process cdc.test exit for 1-th time...
run task successfully
check_data_subset force_replicate_table.t3 127.0.0.1 4000 127.0.0.1 3306
wait process cdc.test exit for 2-th time...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/generate_column/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b303b000014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:21461, start at 2024-05-05 11:28:49.384173557 +0800 CST m=+5.898077514	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:49.392 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:49.394 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:49.394 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3030d40014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:21549, start at 2024-05-05 11:28:48.72820906 +0800 CST m=+5.189335742	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:48.734 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:48.693 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:48.693 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:28:51 CST 2024] <<<<<< run test case cli_with_auth success! >>>>>>
Logging trace to /tmp/tidb_cdc_test/simple/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/simple/tiflash/log/error.log
arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/simple/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/simple/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/simple/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 45-th check, retry later
run task successfully
check_data_subset force_replicate_table.t4 127.0.0.1 4000 127.0.0.1 3306
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:52.205 +08:00] [INFO] [main.go:86] ["running ddl test: 4 modifyColumnDDL"]
[2024/05/05 11:28:52.393 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:28:52.404 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
[2024/05/05 11:28:52.555 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:28:52.557 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:28:52.574 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:28:52.576 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:28:52.729 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:28:52.738 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
run task successfully
check_data_subset force_replicate_table.t5 127.0.0.1 4000 127.0.0.1 3306
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[2024/05/05 11:28:52.904 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
[2024/05/05 11:28:52.906 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:28:52.907 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
[2024/05/05 11:28:52.909 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:28:53.070 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[2024/05/05 11:28:53.083 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
[2024/05/05 11:28:53.228 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:28:53.230 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:28:53.259 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:28:53.260 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
[Pipeline] // stage
[Pipeline] }
run task successfully
check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306
id=19,a=NULL doesn't exist in downstream table force_replicate_table.t6
run task failed 1-th time, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Pipeline] // container
[Pipeline] }
start tidb cluster in /tmp/tidb_cdc_test/generate_column
Starting Upstream PD...
[Pipeline] // withEnv
[2024/05/05 11:28:53.387 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:28:53.436 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[2024/05/05 11:28:53.548 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:28:53.549 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[Pipeline] }
[Pipeline] // node
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
[Pipeline] }
[2024/05/05 11:28:53.617 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/05/05 11:28:53.619 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:28:53.720 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
[2024/05/05 11:28:53.795 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.22953.out cli tso query --pd=http://127.0.0.1:2379
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[2024/05/05 11:28:48.792 +08:00] [INFO] [case.go:115] ["sync updatePKUK take: 12.356766937s"]
[2024/05/05 11:28:53.881 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:28:53.882 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
[2024/05/05 11:28:53.972 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
[2024/05/05 11:28:53.974 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:28:54.045 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"]
[Pipeline] // stage
table test.finish not exists for 46-th check, retry later
[Pipeline] }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:54.144 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"]
[2024/05/05 11:28:54.203 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"]
[2024/05/05 11:28:54.204 +08:00] [INFO] [main.go:234] ["0 delete success: 600"]
[2024/05/05 11:28:54.312 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"]
[2024/05/05 11:28:54.314 +08:00] [INFO] [main.go:234] ["1 delete success: 600"]
[2024/05/05 11:28:54.359 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"]
[2024/05/05 11:28:54.482 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"]
[2024/05/05 11:28:54.513 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"]
[2024/05/05 11:28:54.515 +08:00] [INFO] [main.go:234] ["0 delete success: 700"]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16081.out cli changefeed pause -c simple-handle-key-only-avro
[2024/05/05 11:28:54.656 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"]
[2024/05/05 11:28:54.658 +08:00] [INFO] [main.go:234] ["1 delete success: 700"]
[2024/05/05 11:28:54.672 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"]
[2024/05/05 11:28:54.828 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"]
[2024/05/05 11:28:54.829 +08:00] [INFO] [main.go:234] ["0 delete success: 800"]
[2024/05/05 11:28:54.830 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"]
[2024/05/05 11:28:54.987 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"]
[2024/05/05 11:28:55.002 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"]
[2024/05/05 11:28:55.003 +08:00] [INFO] [main.go:234] ["1 delete success: 800"]
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:28:55.144 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"]
[2024/05/05 11:28:55.145 +08:00] [INFO] [main.go:234] ["0 delete success: 900"]
[2024/05/05 11:28:55.163 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"]
[2024/05/05 11:28:55.310 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"]
[2024/05/05 11:28:55.338 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"]
[2024/05/05 11:28:55.340 +08:00] [INFO] [main.go:234] ["1 delete success: 900"]
Verifying downstream PD is started...
[2024/05/05 11:28:55.473 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
[2024/05/05 11:28:55.474 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
[2024/05/05 11:28:55.499 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"]
[2024/05/05 11:28:55.630 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
+ set +x
+ tso='449545433027444738
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545433027444738 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306
id=7,a=NULL doesn't exist in downstream table force_replicate_table.t6
run task failed 2-th time, retry later
[2024/05/05 11:28:55.672 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
[2024/05/05 11:28:55.674 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
[2024/05/05 11:28:55.809 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
[2024/05/05 11:28:55.810 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
[2024/05/05 11:28:55.856 +08:00] [INFO] [main.go:220] ["1 insert success: 2100"]
[Sun May  5 11:28:55 CST 2024] <<<<<< START cdc server in simple case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.2300623008.out server --log-file /tmp/tidb_cdc_test/simple/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/simple/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 47-th check, retry later
<<< Run all test success >>>
[Pipeline] }
[2024/05/05 11:28:55.983 +08:00] [INFO] [main.go:220] ["0 insert success: 2300"]
[2024/05/05 11:28:56.041 +08:00] [INFO] [main.go:220] ["1 insert success: 2200"]
[2024/05/05 11:28:56.043 +08:00] [INFO] [main.go:234] ["1 delete success: 1100"]
[2024/05/05 11:28:56.151 +08:00] [INFO] [main.go:220] ["0 insert success: 2400"]
[2024/05/05 11:28:56.152 +08:00] [INFO] [main.go:234] ["0 delete success: 1200"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3075c00008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:10899, start at 2024-05-05 11:28:53.109616922 +0800 CST m=+5.187241460	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:53.116 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:53.104 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:53.104 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3075c00008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:10899, start at 2024-05-05 11:28:53.109616922 +0800 CST m=+5.187241460	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:53.116 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:53.104 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:53.104 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b307728000f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-6skrl-cxvmd, pid:10982, start at 2024-05-05 11:28:53.209181812 +0800 CST m=+5.235057108	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:30:53.218 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:28:53.194 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:18:53.194 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/changefeed_auto_stop/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[2024/05/05 11:28:56.232 +08:00] [INFO] [main.go:220] ["1 insert success: 2300"]
[2024/05/05 11:28:56.347 +08:00] [INFO] [main.go:220] ["0 insert success: 2500"]
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16114.out cli changefeed update -c simple-handle-key-only-avro '--sink-uri=kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple&encoding-format=avro&max-message-bytes=650' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/conf/changefeed.toml --no-confirm
[2024/05/05 11:28:56.429 +08:00] [INFO] [main.go:220] ["1 insert success: 2400"]
[2024/05/05 11:28:56.431 +08:00] [INFO] [main.go:234] ["1 delete success: 1200"]
[2024/05/05 11:28:56.524 +08:00] [INFO] [main.go:220] ["0 insert success: 2600"]
[2024/05/05 11:28:56.525 +08:00] [INFO] [main.go:234] ["0 delete success: 1300"]
[2024/05/05 11:28:56.611 +08:00] [INFO] [main.go:220] ["1 insert success: 2500"]
[2024/05/05 11:28:56.683 +08:00] [INFO] [main.go:220] ["0 insert success: 2700"]
[2024/05/05 11:28:56.792 +08:00] [INFO] [main.go:220] ["1 insert success: 2600"]
[2024/05/05 11:28:56.794 +08:00] [INFO] [main.go:234] ["1 delete success: 1300"]
[2024/05/05 11:28:56.858 +08:00] [INFO] [main.go:220] ["0 insert success: 2800"]
[2024/05/05 11:28:56.859 +08:00] [INFO] [main.go:234] ["0 delete success: 1400"]
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
Diff of changefeed config:
{Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple&encoding-format=avro To:kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple&encoding-format=avro&max-message-bytes=650}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc003933458}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc003933468}
{Type:update Path:[Config Consistent] From:<nil> To:0xc0013e6230}
[2024/05/05 11:28:56.987 +08:00] [INFO] [main.go:220] ["1 insert success: 2700"]
[2024/05/05 11:28:57.033 +08:00] [INFO] [main.go:220] ["0 insert success: 2900"]
[2024/05/05 11:28:57.160 +08:00] [INFO] [main.go:220] ["1 insert success: 2800"]
[2024/05/05 11:28:57.162 +08:00] [INFO] [main.go:234] ["1 delete success: 1400"]
[Pipeline] // cache
Update changefeed config successfully! 
ID: simple-handle-key-only-avro
Info: {"upstream_id":7365352278495986351,"namespace":"default","id":"simple-handle-key-only-avro","sink_uri":"kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple\u0026encoding-format=avro\u0026max-message-bytes=650","create_time":"2024-05-05T11:28:47.870914885+08:00","start_ts":449545430882058244,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"handle-key-only","large_message_handle_compression":"lz4","claim_check_storage_uri":""}},"advance_timeout":150,"send_bootstrap_interval_in_sec":0,"send_bootstrap_in_msg_count":0,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545433057853446,"checkpoint_time":"2024-05-05 11:28:54.260"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
[2024/05/05 11:28:57.202 +08:00] [INFO] [main.go:220] ["0 insert success: 3000"]
[2024/05/05 11:28:57.203 +08:00] [INFO] [main.go:234] ["0 delete success: 1500"]
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
table test.finish not exists for 48-th check, retry later
[Pipeline] // node
[Pipeline] }
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_handle_key_only_avro.cli.16147.out cli changefeed resume -c simple-handle-key-only-avro
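Together with the pause and update calls traced earlier in this case, this is the usual pause, update, resume cycle for changing sink parameters on a live changefeed. A condensed sketch using the plain cdc binary; CHANGEFEED_TOML stands in for the case's conf/changefeed.toml path shown above:

  # Stop the changefeed, tighten max-message-bytes on the sink URI, then restart it
  cf=simple-handle-key-only-avro
  cdc cli changefeed pause  -c "$cf"
  cdc cli changefeed update -c "$cf" --no-confirm \
      --sink-uri='kafka://127.0.0.1:9092/simple-handle-key-only-avro-16393?protocol=simple&encoding-format=avro&max-message-bytes=650' \
      --config="$CHANGEFEED_TOML"
  cdc cli changefeed resume -c "$cf"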
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:28:58 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0215f6c5-23b3-4aa0-8521-03fdb44334c5
	{"id":"0215f6c5-23b3-4aa0-8521-03fdb44334c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879736}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbe3f5ed
	0215f6c5-23b3-4aa0-8521-03fdb44334c5

/tidb/cdc/default/default/upstream/7365352317682490966
	{"id":7365352317682490966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0215f6c5-23b3-4aa0-8521-03fdb44334c5
	{"id":"0215f6c5-23b3-4aa0-8521-03fdb44334c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879736}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbe3f5ed
	0215f6c5-23b3-4aa0-8521-03fdb44334c5

/tidb/cdc/default/default/upstream/7365352317682490966
	{"id":7365352317682490966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0215f6c5-23b3-4aa0-8521-03fdb44334c5
	{"id":"0215f6c5-23b3-4aa0-8521-03fdb44334c5","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879736}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbe3f5ed
	0215f6c5-23b3-4aa0-8521-03fdb44334c5

/tidb/cdc/default/default/upstream/7365352317682490966
	{"id":7365352317682490966,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
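The trace above shows the readiness pattern the integration tests rely on: poll the capture's /debug/info endpoint with HTTP basic auth until the response contains "etcd info", retrying up to 50 times and treating "failed to get info:" as a soft failure. A minimal bash sketch of that loop, under the assumption that the endpoint, credentials and retry budget match the trace (the function name itself is illustrative):

# Poll the TiCDC debug endpoint until it reports etcd info (sketch; wait_cdc_ready is an illustrative name).
wait_cdc_ready() {
    local addr=${1:-127.0.0.1:8300}
    local i res
    for ((i = 0; i <= 50; i++)); do
        # --user sends HTTP basic auth, matching the "Authorization: Basic ..." header in the trace.
        res=$(curl -vsL --max-time 20 "http://${addr}/debug/info" --user ticdc:ticdc_secret 2>&1)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server at ${addr} returned an error, retrying..."
        elif echo "$res" | grep -q 'etcd info'; then
            echo "cdc server at ${addr} is ready"
            return 0
        fi
        sleep 3
    done
    echo "cdc server at ${addr} did not become ready" >&2
    return 1
}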
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.simple.cli.23061.out cli changefeed create --start-ts=449545433027444738 '--sink-uri=kafka+ssl://127.0.0.1:9092/ticdc-simple-test-22865?protocol=open-protocol&partition-num=4&kafka-client-id=cdc_test_simple&kafka-version=2.4.1&max-message-bytes=10485760'
***************** properties *****************
"mysql.db"="changefeed_auto_stop_1"
"updateproportion"="0"
"mysql.user"="root"
"workload"="core"
"requestdistribution"="uniform"
"threadcount"="4"
"recordcount"="20"
"readallfields"="true"
"operationcount"="0"
"mysql.host"="127.0.0.1"
"readproportion"="0"
"scanproportion"="0"
"insertproportion"="0"
"dotransactions"="false"
"mysql.port"="4000"
**********************************************
Run finished, takes 10.571651ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3637.6, Avg(us): 2036, Min(us): 1059, Max(us): 5341, 95th(us): 6000, 99th(us): 6000
Create changefeed successfully!
ID: 64ee118d-f294-4a8a-b086-66a24bf1cf6b
Info: {"upstream_id":7365352317682490966,"namespace":"default","id":"64ee118d-f294-4a8a-b086-66a24bf1cf6b","sink_uri":"kafka+ssl://127.0.0.1:9092/ticdc-simple-test-22865?protocol=open-protocol\u0026partition-num=4\u0026kafka-client-id=cdc_test_simple\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:28:59.369894664+08:00","start_ts":449545433027444738,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545433027444738,"checkpoint_ts":449545433027444738,"checkpoint_time":"2024-05-05 11:28:54.144"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
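The changefeed created above pushes to a Kafka sink, and every sink knob (encoder protocol, partition count, Kafka version, max message size, client id) is encoded in the sink URI; the traced command additionally uses the kafka+ssl:// scheme. A hedged sketch of an equivalent invocation, with a placeholder topic name and start-ts:

# Create a changefeed replicating to Kafka with the open-protocol encoder (sketch; topic and start-ts are placeholders).
cdc cli changefeed create \
    --pd=http://127.0.0.1:2379 \
    --start-ts=449545433027444738 \
    --sink-uri='kafka://127.0.0.1:9092/ticdc-example-topic?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'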
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
PASS
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
check_data_subset force_replicate_table.t6 127.0.0.1 4000 127.0.0.1 3306
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
run task successfully
table test.finish not exists for 49-th check, retry later
***************** properties *****************
"updateproportion"="0"
"requestdistribution"="uniform"
"threadcount"="4"
"scanproportion"="0"
"recordcount"="20"
"operationcount"="0"
"mysql.port"="4000"
"workload"="core"
"readproportion"="0"
"mysql.host"="127.0.0.1"
"mysql.user"="root"
"insertproportion"="0"
"mysql.db"="changefeed_auto_stop_2"
"readallfields"="true"
"dotransactions"="false"
**********************************************
Run finished, takes 10.272949ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3411.5, Avg(us): 1863, Min(us): 1029, Max(us): 4332, 95th(us): 5000, 99th(us): 5000
wait process cdc.test exit for 1-th time...
+ set +x
[Sun May  5 11:29:00 CST 2024] <<<<<< START kafka consumer in simple case >>>>>>
succeed to verify meta placement rules
+ set +x
table test.finish_mark not exists for 1-th check, retry later
ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist
check data failed 1-th time, retry later
wait process cdc.test exit for 2-th time...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b30e95c0006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:20235, start at 2024-05-05 11:29:00.512451403 +0800 CST m=+5.400190490	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:00.519 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:00.503 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:00.503 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
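The VARIABLE_NAME/VARIABLE_VALUE dump above is printed while verifying that the freshly started TiDB has bootstrapped and elected a GC worker leader; the rows come from the mysql.tidb bookkeeping table. A minimal sketch of such a probe, assuming a retry wrapper like the "Verifying ... TiDB is started..." phase implies:

# Query TiDB's bootstrap/GC bookkeeping table until the server answers (sketch).
while ! mysql -h 127.0.0.1 -P 4000 -u root \
        -e 'SELECT VARIABLE_NAME, VARIABLE_VALUE, COMMENT FROM mysql.tidb;'; do
    echo "TiDB not ready yet, retrying..."
    sleep 1
done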
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:01 CST 2024] <<<<<< run test case force_replicate_table success! >>>>>>
***************** properties *****************
"requestdistribution"="uniform"
"scanproportion"="0"
"insertproportion"="0"
"readproportion"="0"
"updateproportion"="0"
"threadcount"="4"
"operationcount"="0"
"mysql.host"="127.0.0.1"
"mysql.db"="changefeed_auto_stop_3"
"readallfields"="true"
"mysql.user"="root"
"dotransactions"="false"
"mysql.port"="4000"
"workload"="core"
"recordcount"="20"
**********************************************
Run finished, takes 9.956175ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3747.3, Avg(us): 1919, Min(us): 1065, Max(us): 4507, 95th(us): 5000, 99th(us): 5000
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 50-th check, retry later
***************** properties *****************
"workload"="core"
"threadcount"="4"
"mysql.port"="4000"
"mysql.user"="root"
"readallfields"="true"
"operationcount"="0"
"scanproportion"="0"
"mysql.db"="changefeed_auto_stop_4"
"updateproportion"="0"
"requestdistribution"="uniform"
"recordcount"="20"
"mysql.host"="127.0.0.1"
"dotransactions"="false"
"readproportion"="0"
"insertproportion"="0"
**********************************************
Run finished, takes 9.958777ms
INSERT - Takes(s): 0.0, Count: 20, OPS: 3565.9, Avg(us): 1890, Min(us): 1104, Max(us): 4333, 95th(us): 5000, 99th(us): 5000
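The "properties" banners and INSERT summaries interleaved above are emitted by the YCSB-style workload generator that seeds the changefeed_auto_stop_* databases over the MySQL protocol. Assuming the generator is go-ycsb (not confirmed by the log itself), a hedged sketch of an equivalent load with the traced properties would look like this; the property-file path is illustrative:

# Load 20 rows into changefeed_auto_stop_4 through the MySQL protocol (sketch; assumes the go-ycsb CLI).
go-ycsb load mysql -P workload-core.properties \
    -p mysql.host=127.0.0.1 -p mysql.port=4000 -p mysql.user=root \
    -p mysql.db=changefeed_auto_stop_4 \
    -p workload=core -p recordcount=20 -p operationcount=0 \
    -p threadcount=4 -p requestdistribution=uniform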
[Sun May  5 11:29:02 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1250812510.out server --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc1.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data1 --cluster-id default --addr 127.0.0.1:8301 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8301; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b30e95c0006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:20235, start at 2024-05-05 11:29:00.512451403 +0800 CST m=+5.400190490	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:00.519 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:00.503 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:00.503 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b30ead40015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:20314, start at 2024-05-05 11:29:00.642162244 +0800 CST m=+5.478766260	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:00.649 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:00.647 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:00.647 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/capture_session_done_during_task/tiflash-proxy.toml"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish_mark not exists for 2-th check, retry later
ERROR 1146 (42S02) at line 1: Table 'test.simple1' doesn't exist
check data failed 2-th time, retry later
[2024/05/05 11:29:03.096 +08:00] [INFO] [main.go:86] ["running ddl test: 5 addDropIndexDDL"]
[2024/05/05 11:29:03.289 +08:00] [INFO] [main.go:220] ["0 insert success: 100"]
[2024/05/05 11:29:03.304 +08:00] [INFO] [main.go:220] ["1 insert success: 100"]
[2024/05/05 11:29:03.442 +08:00] [INFO] [main.go:234] ["0 delete success: 100"]
[2024/05/05 11:29:03.443 +08:00] [INFO] [main.go:220] ["0 insert success: 200"]
[2024/05/05 11:29:03.474 +08:00] [INFO] [main.go:220] ["1 insert success: 200"]
[2024/05/05 11:29:03.475 +08:00] [INFO] [main.go:234] ["1 delete success: 100"]
[2024/05/05 11:29:03.615 +08:00] [INFO] [main.go:220] ["0 insert success: 300"]
[2024/05/05 11:29:03.648 +08:00] [INFO] [main.go:220] ["1 insert success: 300"]
[2024/05/05 11:29:03.779 +08:00] [INFO] [main.go:234] ["0 delete success: 200"]
[2024/05/05 11:29:03.780 +08:00] [INFO] [main.go:220] ["0 insert success: 400"]
[2024/05/05 11:29:03.818 +08:00] [INFO] [main.go:220] ["1 insert success: 400"]
[2024/05/05 11:29:03.820 +08:00] [INFO] [main.go:234] ["1 delete success: 200"]
[2024/05/05 11:29:03.949 +08:00] [INFO] [main.go:220] ["0 insert success: 500"]
[2024/05/05 11:29:03.995 +08:00] [INFO] [main.go:220] ["1 insert success: 500"]
[2024/05/05 11:29:04.107 +08:00] [INFO] [main.go:234] ["0 delete success: 300"]
[2024/05/05 11:29:04.107 +08:00] [INFO] [main.go:220] ["0 insert success: 600"]
[2024/05/05 11:29:04.174 +08:00] [INFO] [main.go:220] ["1 insert success: 600"]
[2024/05/05 11:29:04.176 +08:00] [INFO] [main.go:234] ["1 delete success: 300"]
table test.finish not exists for 51-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:29:04.278 +08:00] [INFO] [main.go:220] ["0 insert success: 700"]
[2024/05/05 11:29:04.382 +08:00] [INFO] [main.go:220] ["1 insert success: 700"]
[2024/05/05 11:29:04.497 +08:00] [INFO] [main.go:234] ["0 delete success: 400"]
[2024/05/05 11:29:04.498 +08:00] [INFO] [main.go:220] ["0 insert success: 800"]
[2024/05/05 11:29:04.623 +08:00] [INFO] [main.go:220] ["1 insert success: 800"]
[2024/05/05 11:29:04.625 +08:00] [INFO] [main.go:234] ["1 delete success: 400"]
[2024/05/05 11:29:04.725 +08:00] [INFO] [main.go:220] ["0 insert success: 900"]
[2024/05/05 11:29:04.877 +08:00] [INFO] [main.go:220] ["1 insert success: 900"]
[2024/05/05 11:29:04.960 +08:00] [INFO] [main.go:234] ["0 delete success: 500"]
[2024/05/05 11:29:04.961 +08:00] [INFO] [main.go:220] ["0 insert success: 1000"]
[2024/05/05 11:29:05.144 +08:00] [INFO] [main.go:220] ["1 insert success: 1000"]
[2024/05/05 11:29:05.147 +08:00] [INFO] [main.go:234] ["1 delete success: 500"]
[2024/05/05 11:29:05.199 +08:00] [INFO] [main.go:220] ["0 insert success: 1100"]
table test.finish_mark exists
check diff successfully
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id: 1'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
check data failed 3-th time, retry later
[2024/05/05 11:29:05.420 +08:00] [INFO] [main.go:220] ["1 insert success: 1100"]
[2024/05/05 11:29:05.453 +08:00] [INFO] [main.go:234] ["0 delete success: 600"]
[2024/05/05 11:29:05.454 +08:00] [INFO] [main.go:220] ["0 insert success: 1200"]
wait process cdc.test exit for 1-th time...
[2024/05/05 11:29:05.728 +08:00] [INFO] [main.go:220] ["1 insert success: 1200"]
[2024/05/05 11:29:05.731 +08:00] [INFO] [main.go:234] ["1 delete success: 600"]
[2024/05/05 11:29:05.749 +08:00] [INFO] [main.go:220] ["0 insert success: 1300"]
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.cli.21775.out cli tso query --pd=http://127.0.0.1:2379
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8301/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8301 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8301 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8301
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:05 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:29:05 CST 2024] <<<<<< START cdc server in changefeed_auto_stop case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true);github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)'
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.1256512567.out server --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc2.log --log-level debug --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data2 --cluster-id default --addr 127.0.0.1:8302 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8302; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
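Unlike the first capture, the second cdc server in the changefeed_auto_stop case is launched with GO_FAILPOINTS set, which is how the test injects a processor sync error and a position-update delay into the binary. A sketch of that startup, with flags and failpoint string taken from the trace and the coverage-profile path simplified to a placeholder:

# Inject failpoints via the GO_FAILPOINTS environment variable when starting the second capture (sketch).
GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/pipeline/ProcessorSyncResolvedError=1*return(true);github.com/pingcap/tiflow/cdc/processor/ProcessorUpdatePositionDelaying=sleep(1000)' \
cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.changefeed_auto_stop.out \
    server --addr 127.0.0.1:8302 --pd http://127.0.0.1:2379 --cluster-id default \
    --log-file /tmp/tidb_cdc_test/changefeed_auto_stop/cdc2.log --log-level debug \
    --data-dir /tmp/tidb_cdc_test/changefeed_auto_stop/cdc_data2 &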
[2024/05/05 11:29:06.051 +08:00] [INFO] [main.go:220] ["1 insert success: 1300"]
[2024/05/05 11:29:06.055 +08:00] [INFO] [main.go:234] ["0 delete success: 700"]
[2024/05/05 11:29:06.056 +08:00] [INFO] [main.go:220] ["0 insert success: 1400"]
table test.finish not exists for 52-th check, retry later
wait process cdc.test exit for 2-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[2024/05/05 11:29:06.383 +08:00] [INFO] [main.go:220] ["1 insert success: 1400"]
[2024/05/05 11:29:06.387 +08:00] [INFO] [main.go:234] ["1 delete success: 700"]
[2024/05/05 11:29:06.394 +08:00] [INFO] [main.go:220] ["0 insert success: 1500"]
[2024/05/05 11:29:06.616 +08:00] [INFO] [main.go:234] ["0 delete success: 800"]
[2024/05/05 11:29:06.617 +08:00] [INFO] [main.go:220] ["0 insert success: 1600"]
[2024/05/05 11:29:06.627 +08:00] [INFO] [main.go:220] ["1 insert success: 1500"]
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:06 CST 2024] <<<<<< run test case kafka_simple_handle_key_only_avro success! >>>>>>
[2024/05/05 11:29:06.786 +08:00] [INFO] [main.go:220] ["0 insert success: 1700"]
[2024/05/05 11:29:06.837 +08:00] [INFO] [main.go:220] ["1 insert success: 1600"]
[2024/05/05 11:29:06.839 +08:00] [INFO] [main.go:234] ["1 delete success: 800"]
[2024/05/05 11:29:06.979 +08:00] [INFO] [main.go:234] ["0 delete success: 900"]
[2024/05/05 11:29:06.980 +08:00] [INFO] [main.go:220] ["0 insert success: 1800"]
[2024/05/05 11:29:07.049 +08:00] [INFO] [main.go:220] ["1 insert success: 1700"]
[2024/05/05 11:29:07.161 +08:00] [INFO] [main.go:220] ["0 insert success: 1900"]
[2024/05/05 11:29:07.237 +08:00] [INFO] [main.go:220] ["1 insert success: 1800"]
[2024/05/05 11:29:07.239 +08:00] [INFO] [main.go:234] ["1 delete success: 900"]
[2024/05/05 11:29:07.327 +08:00] [INFO] [main.go:234] ["0 delete success: 1000"]
[2024/05/05 11:29:07.328 +08:00] [INFO] [main.go:220] ["0 insert success: 2000"]
check data successfully
[2024/05/05 11:29:07.510 +08:00] [INFO] [main.go:220] ["1 insert success: 1900"]
[2024/05/05 11:29:07.657 +08:00] [INFO] [main.go:220] ["0 insert success: 2100"]
+ set +x
+ tso='449545436188901377
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545436188901377 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
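The start-ts used later is obtained by asking PD for a current TSO through the cdc CLI and keeping only the first whitespace-separated field of the output; the trailing PASS/coverage lines come from the instrumented test binary. A compact sketch of that extraction:

# Fetch a TSO from PD and strip the PASS/coverage footer printed by the instrumented binary (sketch).
start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk -F ' ' 'NR==1 {print $1}')
echo "using start-ts=${start_ts}"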
[Sun May  5 11:29:07 CST 2024] <<<<<< START cdc server in capture_session_done_during_task case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/processor/processorManagerHandleNewChangefeedDelay=sleep(2000)'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.capture_session_done_during_task.2182221824.out server --log-file /tmp/tidb_cdc_test/capture_session_done_during_task/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/capture_session_done_during_task/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[2024/05/05 11:29:07.827 +08:00] [INFO] [main.go:220] ["1 insert success: 2000"]
[2024/05/05 11:29:07.832 +08:00] [INFO] [main.go:234] ["1 delete success: 1000"]
wait process cdc.test exit for 1-th time...
table test.finish not exists for 53-th check, retry later
[2024/05/05 11:29:07.957 +08:00] [INFO] [main.go:234] ["0 delete success: 1100"]
[2024/05/05 11:29:07.957 +08:00] [INFO] [main.go:220] ["0 insert success: 2200"]
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b312f240014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:33697, start at 2024-05-05 11:29:05.00914637 +0800 CST m=+5.249227155	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:05.016 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:05.019 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:05.019 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b312f240014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:33697, start at 2024-05-05 11:29:05.00914637 +0800 CST m=+5.249227155	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:05.016 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:05.019 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:05.019 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3130a40015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-703xz-4qz9g, pid:33770, start at 2024-05-05 11:29:05.106490064 +0800 CST m=+5.293510432	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:05.114 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:05.115 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:05.115 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/generate_column/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/generate_column/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/generate_column/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:08 CST 2024] <<<<<< run test case simple success! >>>>>>
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8302/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8302 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8302 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8302
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:08 GMT
< Content-Length: 1271
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/60392d77-a0a4-446b-9f90-febf3a2af283
	{"id":"60392d77-a0a4-446b-9f90-febf3a2af283","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879746}

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f83
	60392d77-a0a4-446b-9f90-febf3a2af283

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/60392d77-a0a4-446b-9f90-febf3a2af283
	{"id":"60392d77-a0a4-446b-9f90-febf3a2af283","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879746}

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f83
	60392d77-a0a4-446b-9f90-febf3a2af283

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/60392d77-a0a4-446b-9f90-febf3a2af283
	{"id":"60392d77-a0a4-446b-9f90-febf3a2af283","address":"127.0.0.1:8302","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879746}

/tidb/cdc/default/__cdc_meta__/capture/bf906f5b-50f5-4a3d-b963-cde106596e22
	{"id":"bf906f5b-50f5-4a3d-b963-cde106596e22","address":"127.0.0.1:8301","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879742}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f4b
	bf906f5b-50f5-4a3d-b963-cde106596e22

/tidb/cdc/default/__cdc_meta__/owner/22318f46cbf52f83
	60392d77-a0a4-446b-9f90-febf3a2af283

/tidb/cdc/default/default/upstream/7365352329890585818
	{"id":7365352329890585818,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Sun May  5 11:29:08 CST 2024] <<<<<< START kafka consumer in changefeed_auto_stop case >>>>>>
check_changefeed_state http://127.0.0.1:2379 ebe5336d-eb40-48f2-bd40-12d2a108783e normal null
+ endpoints=http://127.0.0.1:2379
+ changefeed_id=ebe5336d-eb40-48f2-bd40-12d2a108783e
+ expected_state=normal
+ error_msg=null
+ tls_dir=null
+ [[ http://127.0.0.1:2379 =~ https ]]
++ cdc cli changefeed query --pd=http://127.0.0.1:2379 -c ebe5336d-eb40-48f2-bd40-12d2a108783e -s
+ info='{
  "upstream_id": 7365352329890585818,
  "namespace": "default",
  "id": "ebe5336d-eb40-48f2-bd40-12d2a108783e",
  "state": "normal",
  "checkpoint_tso": 449545434117963777,
  "checkpoint_time": "2024-05-05 11:28:58.304",
  "error": null
}'
+ echo '{
  "upstream_id": 7365352329890585818,
  "namespace": "default",
  "id": "ebe5336d-eb40-48f2-bd40-12d2a108783e",
  "state": "normal",
  "checkpoint_tso": 449545434117963777,
  "checkpoint_time": "2024-05-05 11:28:58.304",
  "error": null
}'
{
  "upstream_id": 7365352329890585818,
  "namespace": "default",
  "id": "ebe5336d-eb40-48f2-bd40-12d2a108783e",
  "state": "normal",
  "checkpoint_tso": 449545434117963777,
  "checkpoint_time": "2024-05-05 11:28:58.304",
  "error": null
}
++ echo '{' '"upstream_id":' 7365352329890585818, '"namespace":' '"default",' '"id":' '"ebe5336d-eb40-48f2-bd40-12d2a108783e",' '"state":' '"normal",' '"checkpoint_tso":' 449545434117963777, '"checkpoint_time":' '"2024-05-05' '11:28:58.304",' '"error":' null '}'
++ jq -r .state
+ state=normal
+ [[ ! normal == \n\o\r\m\a\l ]]
++ echo '{' '"upstream_id":' 7365352329890585818, '"namespace":' '"default",' '"id":' '"ebe5336d-eb40-48f2-bd40-12d2a108783e",' '"state":' '"normal",' '"checkpoint_tso":' 449545434117963777, '"checkpoint_time":' '"2024-05-05' '11:28:58.304",' '"error":' null '}'
++ jq -r .error.message
+ message=null
+ [[ ! null =~ null ]]
run task successfully
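As the trace above shows, check_changefeed_state is essentially a wrapper around `cdc cli changefeed query` plus jq: it asserts the reported state and, when an error is expected, inspects the error message. A hedged sketch of the same check, with the error-message branch simplified away:

# Assert that a changefeed is in the expected state (sketch, simplified from the traced helper).
check_changefeed_state() {
    local pd=$1 changefeed_id=$2 expected_state=$3
    local info state
    info=$(cdc cli changefeed query --pd="$pd" -c "$changefeed_id" -s)
    state=$(echo "$info" | jq -r .state)
    if [[ "$state" != "$expected_state" ]]; then
        echo "changefeed $changefeed_id state is $state, expected $expected_state" >&2
        return 1
    fi
    echo "run task successfully"
}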
table changefeed_auto_stop_1.usertable not exists for 1-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/savepoint/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.35162.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 54-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/savepoint
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:10 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/37d753cb-3763-4ebf-84e8-983c7c790a11
	{"id":"37d753cb-3763-4ebf-84e8-983c7c790a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879748}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1342f1
	37d753cb-3763-4ebf-84e8-983c7c790a11

/tidb/cdc/default/default/upstream/7365352364821904349
	{"id":7365352364821904349,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/37d753cb-3763-4ebf-84e8-983c7c790a11
	{"id":"37d753cb-3763-4ebf-84e8-983c7c790a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879748}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1342f1
	37d753cb-3763-4ebf-84e8-983c7c790a11

/tidb/cdc/default/default/upstream/7365352364821904349
	{"id":7365352364821904349,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/37d753cb-3763-4ebf-84e8-983c7c790a11
	{"id":"37d753cb-3763-4ebf-84e8-983c7c790a11","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879748}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1342f1
	37d753cb-3763-4ebf-84e8-983c7c790a11

/tidb/cdc/default/default/upstream/7365352364821904349
	{"id":7365352364821904349,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
table changefeed_auto_stop_1.usertable not exists for 2-th check, retry later
+ set +x
+ tso='449545437294100481
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545437294100481 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:29:11 CST 2024] <<<<<< START cdc server in generate_column case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.3519635198.out server --log-file /tmp/tidb_cdc_test/generate_column/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/generate_column/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
[Sun May  5 11:29:12 CST 2024] <<<<<< START kafka consumer in capture_session_done_during_task case >>>>>>
lease 22318f46cc1342f1 revoked
table test.finish not exists for 55-th check, retry later
table capture_session_done_during_task.t exists
check diff failed 1-th time, retry later
Verifying downstream PD is started...
table changefeed_auto_stop_1.usertable exists
table changefeed_auto_stop_2.usertable not exists for 1-th check, retry later
table test.finish not exists for 56-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:14 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8e17a1b8-b72d-4a18-879a-bc9a7c044d58
	{"id":"8e17a1b8-b72d-4a18-879a-bc9a7c044d58","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879752}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1f6ed9
	8e17a1b8-b72d-4a18-879a-bc9a7c044d58

/tidb/cdc/default/default/upstream/7365352377172974451
	{"id":7365352377172974451,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8e17a1b8-b72d-4a18-879a-bc9a7c044d58
	{"id":"8e17a1b8-b72d-4a18-879a-bc9a7c044d58","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879752}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1f6ed9
	8e17a1b8-b72d-4a18-879a-bc9a7c044d58

/tidb/cdc/default/default/upstream/7365352377172974451
	{"id":7365352377172974451,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8e17a1b8-b72d-4a18-879a-bc9a7c044d58
	{"id":"8e17a1b8-b72d-4a18-879a-bc9a7c044d58","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879752}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc1f6ed9
	8e17a1b8-b72d-4a18-879a-bc9a7c044d58

/tidb/cdc/default/default/upstream/7365352377172974451
	{"id":7365352377172974451,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.generate_column.cli.35251.out cli changefeed create --start-ts=449545437294100481 '--sink-uri=kafka://127.0.0.1:9092/ticdc-generate-column-test-4500?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
check diff failed 2-th time, retry later
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Create changefeed successfully!
ID: a234c858-0a08-460f-9df3-71ee86e6f76a
Info: {"upstream_id":7365352377172974451,"namespace":"default","id":"a234c858-0a08-460f-9df3-71ee86e6f76a","sink_uri":"kafka://127.0.0.1:9092/ticdc-generate-column-test-4500?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:29:15.45094132+08:00","start_ts":449545437294100481,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545437294100481,"checkpoint_ts":449545437294100481,"checkpoint_time":"2024-05-05 11:29:10.420"}
PASS
table changefeed_auto_stop_2.usertable exists
table changefeed_auto_stop_3.usertable not exists for 1-th check, retry later
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
<<< Run all test success >>>

[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
table test.finish not exists for 57-th check, retry later
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
+ set +x
[Sun May  5 11:29:16 CST 2024] <<<<<< START kafka consumer in generate_column case >>>>>>
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
table changefeed_auto_stop_3.usertable exists
table changefeed_auto_stop_4.usertable not exists for 1-th check, retry later
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 3-th time, retry later
[2024/05/05 11:29:16.554 +08:00] [INFO] [main.go:78] ["runDDLTest take 1m7.535944032s"]
table mark.finish_mark_0 exists
table mark.finish_mark_1 exists
table mark.finish_mark_2 exists
table mark.finish_mark_3 exists
table mark.finish_mark_4 exists
table mark.finish_mark not exists for 1-th check, retry later
table test.finish not exists for 58-th check, retry later
table generate_column.t not exists for 1-th check, retry later
table changefeed_auto_stop_4.usertable exists
check diff failed 1-th time, retry later
table mark.finish_mark not exists for 2-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
check diff failed 1-th time, retry later
table test.finish not exists for 59-th check, retry later
[2024/05/05 11:29:19.342 +08:00] [WARN] [diff.go:182] ["table struct is not equal"] [reason="column num not equal, one is 4 another is 3"]
table generate_column.t exists
table generate_column.t1 exists
check diff failed 1-th time, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/cdc_server_tips/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
check diff successfully
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table mark.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 1-th time...
check diff successfully
wait process cdc.test exit for 1-th time...
table test.finish not exists for 60-th check, retry later
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 3-th time...
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 3-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:23 CST 2024] <<<<<< run test case capture_session_done_during_task success! >>>>>>
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:29:23 CST 2024] <<<<<< run test case changefeed_auto_stop success! >>>>>>
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:29:23 CST 2024] <<<<<< run test case multi_source success! >>>>>>
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3244e80014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:14886, start at 2024-05-05 11:29:22.787442048 +0800 CST m=+5.429319238	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:22.794 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:22.796 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:22.796 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
wait process cdc.test exit for 2-th time...
start tidb cluster in /tmp/tidb_cdc_test/cdc_server_tips
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:24 CST 2024] <<<<<< run test case generate_column success! >>>>>>
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_handle_key_only_avro/run.sh: line 1: 16188 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
table test.finish not exists for 61-th check, retry later
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   221  100   221    0     0   2570      0 --:--:-- --:--:-- --:--:--  2600
+ synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-05 11:29:12.389","puller_resolved_ts":"2024-05-05 11:29:05.390","last_synced_ts":"2024-05-05 11:26:56.540","now_ts":"2024-05-05 11:29:13.000","info":"Data syncing is finished"}'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:29:12.389","puller_resolved_ts":"2024-05-05' '11:29:05.390","last_synced_ts":"2024-05-05' '11:26:56.540","now_ts":"2024-05-05' '11:29:13.000","info":"Data' syncing is 'finished"}'
++ jq .synced
+ status=true
+ '[' true '!=' true ']'
+ kill_pd
++ ps aux
++ grep pd-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
+ info='jenkins     9915  7.3  0.0 13653076 141572 ?     Sl   11:26   0:11 pd-server --advertise-client-urls http://127.0.0.1:2379 --client-urls http://0.0.0.0:2379 --advertise-peer-urls http://127.0.0.1:2380 --peer-urls http://0.0.0.0:2380 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/pd1.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/pd1 --name=pd1 --initial-cluster=pd1=http://127.0.0.1:2380
jenkins     9972  4.9  0.0 13587348 142300 ?     Sl   11:26   0:07 pd-server --advertise-client-urls http://127.0.0.1:2479 --client-urls http://0.0.0.0:2479 --advertise-peer-urls http://127.0.0.1:2480 --peer-urls http://0.0.0.0:2480 --config /tmp/tidb_cdc_test/synced_status_with_redo/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status_with_redo/down_pd.log --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/down_pd'
++ ps aux
++ grep pd-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
++ awk '{print $2}'
++ xargs kill -9
+ sleep 20
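Note: a minimal bash sketch of the check traced above (query the synced status, then kill PD), assuming the CDC server listens on 127.0.0.1:8300 and the test work dir is /tmp/tidb_cdc_test/synced_status_with_redo; the changefeed id test-1 and all paths are taken from this trace, not from the test source.
# Query the synced status of changefeed test-1 and require synced == true.
synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced_status" | jq -r .synced)
if [ "$status" != "true" ]; then
    echo "unexpected synced status: $synced_status"
    exit 1
fi
# Kill every pd-server belonging to this test's work dir, then give the
# changefeed time to notice the PD outage before re-checking the status.
ps aux | grep pd-server | grep /tmp/tidb_cdc_test/synced_status_with_redo \
    | awk '{print $2}' | xargs kill -9
sleep 20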
{"level":"warn","ts":1714879761.408732,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc004580a80/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879761.4088013,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879761.4304442,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0023968c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879761.4305022,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879761.5048592,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0005cb880/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":1714879761.5049047,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3244e80014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:14886, start at 2024-05-05 11:29:22.787442048 +0800 CST m=+5.429319238	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:22.794 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:22.796 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:22.796 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3244ac0015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:14972, start at 2024-05-05 11:29:22.776673141 +0800 CST m=+5.341886217	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:22.784 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:22.781 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:22.781 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/savepoint/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/savepoint/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/savepoint/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
{"level":"warn","ts":"2024-05-05T11:29:26.269165+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:26.270358+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:26.326013+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 62-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_claim_check
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16372.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 63-th check, retry later
+ set +x
+ tso='449545441967079426
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545441967079426 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
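Note: the TSO query above runs through the test harness wrapper (run_cdc_cli), and the coverage-instrumented cdc.test binary also prints PASS and a coverage line, so only the first field of the output is the timestamp. A minimal sketch of extracting a start-ts this way, assuming a cdc binary on PATH and PD at 127.0.0.1:2379:
# Ask PD (via the cdc CLI) for a current TSO and keep only the numeric value.
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')
echo "start-ts=$start_ts"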
[Sun May  5 11:29:29 CST 2024] <<<<<< START cdc server in savepoint case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.1640916411.out server --log-file /tmp/tidb_cdc_test/savepoint/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/savepoint/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
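Note: the loop above is the test helper waiting for the CDC server to come up. A minimal sketch of the same readiness check, assuming the server listens on 127.0.0.1:8300 with the basic-auth credentials seen in the trace (ticdc:ticdc_secret); it polls /debug/info until the body contains "etcd info", retrying up to 50 times:
for i in $(seq 0 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    # A body mentioning "failed to get info:" means the server is up but unhealthy.
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error"; exit 1
    fi
    # "etcd info" appears once the server has registered its capture in etcd.
    echo "$res" | grep -q 'etcd info' && break
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time"; exit 1
    fi
    sleep 3
done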
table test.finish not exists for 64-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 65-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:32 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a
	{"id":"c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879770}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc634fdb
	c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a

/tidb/cdc/default/default/upstream/7365352448872986867
	{"id":7365352448872986867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a
	{"id":"c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879770}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc634fdb
	c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a

/tidb/cdc/default/default/upstream/7365352448872986867
	{"id":7365352448872986867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a
	{"id":"c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879770}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc634fdb
	c5ee8d68-e6f4-4e08-92e9-4dedf3761a9a

/tidb/cdc/default/default/upstream/7365352448872986867
	{"id":7365352448872986867,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.savepoint.cli.16465.out cli changefeed create --start-ts=449545441967079426 '--sink-uri=kafka://127.0.0.1:9092/ticdc-savepoint-test-19083?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760'
{"level":"warn","ts":"2024-05-05T11:29:32.270622+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:32.272251+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:32.326599+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
Create changefeed successfully!
ID: e487c8e2-d220-458d-934d-53a9bb81e1f8
Info: {"upstream_id":7365352448872986867,"namespace":"default","id":"e487c8e2-d220-458d-934d-53a9bb81e1f8","sink_uri":"kafka://127.0.0.1:9092/ticdc-savepoint-test-19083?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:29:33.297537109+08:00","start_ts":449545441967079426,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545441967079426,"checkpoint_ts":449545441967079426,"checkpoint_time":"2024-05-05 11:29:28.246"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
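Note: the changefeed created above for the savepoint case can be reproduced roughly with the command below; the start-ts, topic name and Kafka parameters are the values printed in this log, and a cdc binary is assumed to be on PATH (the test itself runs the coverage-instrumented cdc.test wrapper).
cdc cli changefeed create \
    --start-ts=449545441967079426 \
    --sink-uri="kafka://127.0.0.1:9092/ticdc-savepoint-test-19083?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"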
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
+ set +x
[Sun May  5 11:29:34 CST 2024] <<<<<< START kafka consumer in savepoint case >>>>>>
table test.finish not exists for 66-th check, retry later
table savepoint.finish_mark not exists for 1-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/ddl_attributes/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
<<< Run all test success >>>
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // dir
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
<<< Run all test success >>>
[Pipeline] // stage
[Pipeline] // timeout
table test.finish not exists for 67-th check, retry later
[Pipeline] }
[Pipeline] }
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // container
[Pipeline] // stage
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // container
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] // withEnv
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
table savepoint.finish_mark not exists for 2-th check, retry later
[Pipeline] // node
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // podTemplate
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // withEnv
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // withEnv
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3321e80019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:24257, start at 2024-05-05 11:29:36.935171772 +0800 CST m=+5.240468034	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:36.942 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:36.940 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:36.940 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] }

  0     0    0     0    0     0      0      0 --:--:--  0:00:04 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:29:38.271325+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:38.272986+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:38.32874+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[2024/05/05 11:29:38.035 +08:00] [INFO] [dailytest.go:68] ["test pass!!!"]
[Pipeline] // stage
[Pipeline] }
table test.finish not exists for 68-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/ddl_attributes
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
table savepoint.finish_mark exists
check diff successfully
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3334bc0010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:17370, start at 2024-05-05 11:29:38.108712361 +0800 CST m=+5.723536636	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:38.114 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:38.095 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:38.095 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b332d940010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:17449, start at 2024-05-05 11:29:37.65602328 +0800 CST m=+5.219857498	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:37.665 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:37.637 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:37.637 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
wait process cdc.test exit for 1-th time...
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_claim_check/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_claim_check/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process cdc.test exit for 3-th time...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3321e80019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:24257, start at 2024-05-05 11:29:36.935171772 +0800 CST m=+5.240468034	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:36.942 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:36.940 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:36.940 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3321b00015	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:24336, start at 2024-05-05 11:29:36.923350899 +0800 CST m=+5.176041043	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:36.930 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:36.926 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:36.926 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/error.log
arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/cdc_server_tips/tiflash/log/proxy.log"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:29:40 CST 2024] <<<<<< run test case cdc success! >>>>>>
table test.finish not exists for 69-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:29:40 CST 2024] <<<<<< run test case savepoint success! >>>>>>
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.cli.25794.out cli tso query --pd=http://127.0.0.1:2379
[Sun May  5 11:29:41 CST 2024] <<<<<< START cdc server in kafka_simple_claim_check case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.1879018792.out server --log-file /tmp/tidb_cdc_test/kafka_simple_claim_check/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_claim_check/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 70-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

  0     0    0     0    0     0      0      0 --:--:--  0:00:10 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:29:44.272363+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:44.273555+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:44.329883+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
+ set +x
+ tso='449545445661736961
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545445661736961 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
try a VALID cdc server command
[Sun May  5 11:29:44 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2583825840.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
table test.finish not exists for 71-th check, retry later
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:44 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/361e547c-4328-47d7-8210-68fce3ca5dce
	{"id":"361e547c-4328-47d7-8210-68fce3ca5dce","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879782}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cca2f9cb
	361e547c-4328-47d7-8210-68fce3ca5dce

/tidb/cdc/default/default/upstream/7365352528581942717
	{"id":7365352528581942717,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/361e547c-4328-47d7-8210-68fce3ca5dce
	{"id":"361e547c-4328-47d7-8210-68fce3ca5dce","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879782}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cca2f9cb
	361e547c-4328-47d7-8210-68fce3ca5dce

/tidb/cdc/default/default/upstream/7365352528581942717
	{"id":7365352528581942717,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/361e547c-4328-47d7-8210-68fce3ca5dce
	{"id":"361e547c-4328-47d7-8210-68fce3ca5dce","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879782}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cca2f9cb
	361e547c-4328-47d7-8210-68fce3ca5dce

/tidb/cdc/default/default/upstream/7365352528581942717
	{"id":7365352528581942717,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.cli.18845.out cli tso query --pd=http://127.0.0.1:2379
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

  0     0    0     0    0     0      0      0 --:--:--  0:00:12 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:29:46.259342+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:29:46.25938+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":"2024-05-05T11:29:46.263528+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:29:46.263567+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":"2024-05-05T11:29:46.315241+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:29:46.315281+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
table test.finish not exists for 72-th check, retry later
+ set +x
+ tso='449545446423003139
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545446423003139 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.cli.18893.out cli changefeed create --start-ts=449545446423003139 '--sink-uri=kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple' -c kafka-simple-claim-check --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check/conf/changefeed.toml
Create changefeed successfully!
ID: kafka-simple-claim-check
Info: {"upstream_id":7365352528581942717,"namespace":"default","id":"kafka-simple-claim-check","sink_uri":"kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple","create_time":"2024-05-05T11:29:47.153360114+08:00","start_ts":449545446423003139,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"claim-check","large_message_handle_compression":"snappy","claim_check_storage_uri":"file:///tmp/kafka-simple-claim-check"}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545446423003139,"checkpoint_ts":449545446423003139,"checkpoint_time":"2024-05-05 11:29:45.244"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
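The changefeed created above enables claim-check handling for oversized Kafka messages: payloads beyond the size limit are compressed with snappy, written to the configured storage URI, and only a reference is delivered to the topic. A hedged sketch of the corresponding config fragment is below; the TOML key names mirror the kafka_config.large_message_handle block echoed in the changefeed info, but the test's actual conf/changefeed.toml is not reproduced in this log.

# Sketch: write a changefeed config enabling claim-check handling for
# oversized Kafka messages (values are illustrative).
cat > /tmp/changefeed-claim-check.toml <<'EOF'
[sink.kafka-config.large-message-handle]
large-message-handle-option = "claim-check"
large-message-handle-compression = "snappy"
claim-check-storage-uri = "file:///tmp/kafka-simple-claim-check"
EOF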
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:47 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257
	{"id":"dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879784}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc963ce3
	dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257

/tidb/cdc/default/default/upstream/7365352520243140506
	{"id":7365352520243140506,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257
	{"id":"dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879784}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc963ce3
	dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257

/tidb/cdc/default/default/upstream/7365352520243140506
	{"id":7365352520243140506,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257
	{"id":"dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879784}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cc963ce3
	dc3c428e-baf7-4a12-b2d9-2ea9ba0e5257

/tidb/cdc/default/default/upstream/7365352520243140506
	{"id":7365352520243140506,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
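The curl/grep trace that ends with `break` above is the server-readiness loop: the harness polls the cdc HTTP status endpoint up to 50 times, fails fast if the response contains 'failed to get info:', and stops waiting once the dump contains 'etcd info'. A compact sketch of that loop, with the endpoint, credentials, and retry budget taken from the trace and the error handling simplified:

# Sketch of the readiness loop shown in the trace above.
get_info_fail_msg='failed to get info:'
etcd_info_msg='etcd info'
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q "$get_info_fail_msg"; then
        echo "cdc server reported an error: $res" >&2
        exit 1
    fi
    if echo "$res" | grep -q "$etcd_info_msg"; then
        break                      # the capture is registered in etcd; server is ready
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time" >&2
        exit 1
    fi
    sleep 3
done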
table test.finish does not exist at the 73rd check; retrying later
+ set +x
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b33d5280012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:23333, start at 2024-05-05 11:29:48.38483018 +0800 CST m=+5.280597168	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:48.398 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:48.362 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:48.362 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
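The 'ERROR 2003 (HY000)' lines are expected while the freshly launched TiDB servers are still starting; the harness simply retries the connection until the port accepts it. A minimal sketch of such a wait (host, port, and retry count are illustrative):

# Sketch: retry until a TiDB endpoint accepts MySQL connections.
wait_for_tidb() {
    local host=$1 port=$2
    for _ in $(seq 1 60); do
        if mysql -h"$host" -P"$port" -uroot -e 'SELECT 1' >/dev/null 2>&1; then
            return 0
        fi
        sleep 1
    done
    echo "TiDB at $host:$port did not become ready" >&2
    return 1
}
# wait_for_tidb 127.0.0.1 4000   # upstream
# wait_for_tidb 127.0.0.1 3306   # downstream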

(curl progress meter elided: no data received between 0:00:13 and 0:00:16)
{"level":"warn","ts":"2024-05-05T11:29:50.273067+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:50.273811+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:50.330319+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish does not exist at the 74th check; retrying later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b33d5280012	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:23333, start at 2024-05-05 11:29:48.38483018 +0800 CST m=+5.280597168	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:48.398 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:48.362 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:48.362 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b33d5e40016	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-klk36-npv9p, pid:23418, start at 2024-05-05 11:29:48.440661828 +0800 CST m=+5.283549630	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:31:48.446 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:29:48.409 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:19:48.409 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/ddl_attributes/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/log/proxy.log"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/ddl_attributes/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish does not exist at the 75th check; retrying later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/split_region/run.sh using Sink-Type: kafka... <<=================
Attempt 1 to start the tidb cluster...
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh using Sink-Type: kafka... <<=================
+++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh
++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status
++ pwd
+ CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status
+ source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/../_utils/test_prepare
++ UP_TIDB_HOST=127.0.0.1
++ UP_TIDB_PORT=4000
++ UP_TIDB_OTHER_PORT=4001
++ UP_TIDB_STATUS=10080
++ UP_TIDB_OTHER_STATUS=10081
++ DOWN_TIDB_HOST=127.0.0.1
++ DOWN_TIDB_PORT=3306
++ DOWN_TIDB_STATUS=20080
++ TLS_TIDB_HOST=127.0.0.1
++ TLS_TIDB_PORT=3307
++ TLS_TIDB_STATUS=30080
++ UP_PD_HOST_1=127.0.0.1
++ UP_PD_PORT_1=2379
++ UP_PD_PEER_PORT_1=2380
++ UP_PD_HOST_2=127.0.0.1
++ UP_PD_PORT_2=2679
++ UP_PD_PEER_PORT_2=2680
++ UP_PD_HOST_3=127.0.0.1
++ UP_PD_PORT_3=2779
++ UP_PD_PEER_PORT_3=2780
++ DOWN_PD_HOST=127.0.0.1
++ DOWN_PD_PORT=2479
++ DOWN_PD_PEER_PORT=2480
++ TLS_PD_HOST=127.0.0.1
++ TLS_PD_PORT=2579
++ TLS_PD_PEER_PORT=2580
++ UP_TIKV_HOST_1=127.0.0.1
++ UP_TIKV_PORT_1=20160
++ UP_TIKV_STATUS_PORT_1=20181
++ UP_TIKV_HOST_2=127.0.0.1
++ UP_TIKV_PORT_2=20161
++ UP_TIKV_STATUS_PORT_2=20182
++ UP_TIKV_HOST_3=127.0.0.1
++ UP_TIKV_PORT_3=20162
++ UP_TIKV_STATUS_PORT_3=20183
++ DOWN_TIKV_HOST=127.0.0.1
++ DOWN_TIKV_PORT=21160
++ DOWN_TIKV_STATUS_PORT=21180
++ TLS_TIKV_HOST=127.0.0.1
++ TLS_TIKV_PORT=22160
++ TLS_TIKV_STATUS_PORT=22180
+++ cat /tmp/tidb_cdc_test/KAFKA_VERSION
+++ echo 2.4.1
++ KAFKA_VERSION=2.4.1
+ WORK_DIR=/tmp/tidb_cdc_test/synced_status
+ CDC_BINARY=cdc.test
+ SINK_TYPE=kafka
+ CDC_COUNT=3
+ DB_COUNT=4
+ trap stop_tidb_cluster EXIT
+ run_normal_case_and_unavailable_pd conf/changefeed.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status
+ mkdir -p /tmp/tidb_cdc_test/synced_status
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status
Attempt 1 to start the tidb cluster...
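The xtrace from synced_status/run.sh above shows the scaffolding shared by these integration tests: resolve the test directory, source _utils/test_prepare for the host/port map, register stop_tidb_cluster on EXIT, and start a fresh cluster in a dedicated workdir. A condensed sketch of that skeleton, with the helper bodies left to _utils:

#!/bin/bash
# Sketch of the per-test scaffolding visible in the trace above.
CUR=$(cd "$(dirname "$0")" && pwd)
source "$CUR/../_utils/test_prepare"       # exports UP_TIDB_HOST, DOWN_PD_PORT, KAFKA_VERSION, ...
WORK_DIR=/tmp/tidb_cdc_test/synced_status
CDC_BINARY=cdc.test
SINK_TYPE=${1:-kafka}
trap stop_tidb_cluster EXIT                # always tear the cluster down, even on failure
rm -rf "$WORK_DIR" && mkdir -p "$WORK_DIR"
start_tidb_cluster --workdir "$WORK_DIR"   # helper from _utils; the harness retries on failure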
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.24765.out cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.cli.18945.out cli changefeed pause -c kafka-simple-claim-check
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table test.finish does not exist at the 76th check; retrying later
+ set +x
+ tso='449545448643100673
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545448643100673 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:29:55 CST 2024] <<<<<< START cdc server in ddl_attributes case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.2480024802.out server --log-file /tmp/tidb_cdc_test/ddl_attributes/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/ddl_attributes/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
11:29AM INF > Run case=sql/debezium/binary_column_test.sql
start tidb cluster in /tmp/tidb_cdc_test/split_region
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.cli.18978.out cli changefeed update -c kafka-simple-claim-check '--sink-uri=kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple&max-message-bytes=2048' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check/conf/changefeed.toml --no-confirm
start tidb cluster in /tmp/tidb_cdc_test/synced_status
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...

(curl progress meter elided: no data received between 0:00:17 and 0:00:22)
{"level":"warn","ts":"2024-05-05T11:29:56.274437+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:56.274826+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:29:56.331885+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
Diff of changefeed config:
{Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple To:kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple&max-message-bytes=2048}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc001484038}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc001484068}
{Type:update Path:[Config Consistent] From:<nil> To:0xc000e26540}
{"level":"warn","ts":1714879796.4098952,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc004580a80/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879796.409926,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879796.4322808,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0023968c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879796.4323182,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879796.5062406,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0005cb880/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":1714879796.5062728,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
Update changefeed config successfully! 
ID: kafka-simple-claim-check
Info: {"upstream_id":7365352528581942717,"namespace":"default","id":"kafka-simple-claim-check","sink_uri":"kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple\u0026max-message-bytes=2048","create_time":"2024-05-05T11:29:47.153360114+08:00","start_ts":449545446423003139,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"claim-check","large_message_handle_compression":"snappy","claim_check_storage_uri":"file:///tmp/kafka-simple-claim-check"}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545448611905541,"checkpoint_time":"2024-05-05 11:29:53.594"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table test.finish does not exist at the 77th check; retrying later
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check.cli.19026.out cli changefeed resume -c kafka-simple-claim-check
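Taken together, the kafka_simple_claim_check steps above exercise the usual pause → update → resume cycle for changing a running changefeed's sink URI. A condensed sketch of that cycle (the ID and URI are copied from the log; the coverprofile flags are dropped and the config path is a placeholder):

# Sketch: modify a running changefeed's sink URI in place.
CF_ID=kafka-simple-claim-check
SINK_URI='kafka://127.0.0.1:9092/kafka-simple-claim-check-14905?protocol=simple&max-message-bytes=2048'
CONFIG=conf/changefeed.toml   # the full path used by the test appears in the trace above
cdc cli changefeed pause -c "$CF_ID"
cdc cli changefeed update -c "$CF_ID" --sink-uri="$SINK_URI" --config="$CONFIG" --no-confirm
cdc cli changefeed resume -c "$CF_ID"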
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:29:58 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7a900720-c7bf-4bd4-9a6e-9c968b6f6713
	{"id":"7a900720-c7bf-4bd4-9a6e-9c968b6f6713","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879795}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ccceccd4
	7a900720-c7bf-4bd4-9a6e-9c968b6f6713

/tidb/cdc/default/default/upstream/7365352574844652408
	{"id":7365352574844652408,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7a900720-c7bf-4bd4-9a6e-9c968b6f6713
	{"id":"7a900720-c7bf-4bd4-9a6e-9c968b6f6713","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879795}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ccceccd4
	7a900720-c7bf-4bd4-9a6e-9c968b6f6713

/tidb/cdc/default/default/upstream/7365352574844652408
	{"id":7365352574844652408,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/7a900720-c7bf-4bd4-9a6e-9c968b6f6713
	{"id":"7a900720-c7bf-4bd4-9a6e-9c968b6f6713","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879795}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ccceccd4
	7a900720-c7bf-4bd4-9a6e-9c968b6f6713

/tidb/cdc/default/default/upstream/7365352574844652408
	{"id":7365352574844652408,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.ddl_attributes.cli.24852.out cli changefeed create --start-ts=449545448643100673 '--sink-uri=kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-5439?protocol=open-protocol&partition-num=4&kafka-version=2.4.1'
PASS
table test.finish does not exist at the 78th check; retrying later
Create changefeed successfully!
ID: 43f5ffa9-97ce-4313-aa2c-304fcd19ed22
Info: {"upstream_id":7365352574844652408,"namespace":"default","id":"43f5ffa9-97ce-4313-aa2c-304fcd19ed22","sink_uri":"kafka://127.0.0.1:9092/ticdc-ddl-attributes-test-5439?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1","create_time":"2024-05-05T11:29:58.736494976+08:00","start_ts":449545448643100673,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545448643100673,"checkpoint_ts":449545448643100673,"checkpoint_time":"2024-05-05 11:29:53.713"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
Verifying downstream PD is started...
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
11:29AM INF > Run case=sql/debezium/binary_mode_test.sql
+ set +x
[Sun May  5 11:30:00 CST 2024] <<<<<< START kafka consumer in ddl_attributes case >>>>>>
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
+ set +x
table test.finish_mark does not exist at the 1st check; retrying later
table test.finish does not exist at the 79th check; retrying later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

(curl progress meter elided: no data received between 0:00:23 and 0:00:28)
{"level":"warn","ts":"2024-05-05T11:30:02.275346+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc001322000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:30:02.275545+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc000f22380/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:30:02.333351+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00034c8c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish does not exist at the 80th check; retrying later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark does not exist at the 2nd check; retrying later

(curl progress meter elided: request completed after ~0:00:30 with 135 bytes received)
+ synced_status='{
    "error_msg": "[CDC:ErrPDEtcdAPIError]etcd api call error: context deadline exceeded",
    "error_code": "CDC:ErrPDEtcdAPIError"
}'
++ echo '{' '"error_msg":' '"[CDC:ErrPDEtcdAPIError]etcd' api call error: context deadline 'exceeded",' '"error_code":' '"CDC:ErrPDEtcdAPIError"' '}'
++ jq -r .error_code
+ error_code=CDC:ErrPDEtcdAPIError
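Here the synced_status case queries the changefeed's synced-status API while PD has been made unavailable, so after roughly 30 seconds the call returns the JSON error above, and the script extracts error_code with jq and expects CDC:ErrPDEtcdAPIError. A small sketch of that assertion; the /api/v2/.../synced path and the changefeed ID are assumptions, since only the response body appears in this part of the log:

# Sketch: fetch the changefeed synced status and assert on the error code.
synced_status=$(curl -s --max-time 60 "http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced")
error_code=$(echo "$synced_status" | jq -r .error_code)
if [ "$error_code" != "CDC:ErrPDEtcdAPIError" ]; then
    echo "unexpected synced status response: $synced_status" >&2
    exit 1
fi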
+ cleanup_process cdc.test
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
waiting for process cdc.test to exit, attempt 1...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish does not exist at the 81st check; retrying later
waiting for process cdc.test to exit, attempt 2...
11:30AM INF > Run case=sql/debezium/connector_test.sql
table test.finish_mark exists
check diff succeeded
waiting for process cdc.test to exit, attempt 1...
cdc.test: no process found
waiting for process cdc.test to exit, attempt 3...
process cdc.test has already exited
+ stop_tidb_cluster
waiting for process cdc.test to exit, attempt 2...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34dfb80010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:23985, start at 2024-05-05 11:30:05.439606897 +0800 CST m=+5.080864285	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:05.447 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:05.422 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:05.422 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34dfb80010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:23985, start at 2024-05-05 11:30:05.439606897 +0800 CST m=+5.080864285	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:05.447 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:05.422 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:05.422 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34e1240014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-1nvp9-6cgzr, pid:24068, start at 2024-05-05 11:30:05.541359355 +0800 CST m=+5.128448684	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:05.548 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:05.513 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:05.513 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc.test: no process found
waiting for process cdc.test to exit, attempt 3...
process cdc.test has already exited
[Sun May  5 11:30:06 CST 2024] <<<<<< run test case kafka_simple_claim_check success! >>>>>>
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/split_region/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/split_region/tiflash/log/error.log
arg matches is ArgMatches { args: {"addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/split_region/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/split_region/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/split_region/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_attributes.attributes_t1_new does not exist at the 1st check; retrying later
table test.finish does not exist at the 82nd check; retrying later
table ddl_attributes.attributes_t1_new does not exist at the 2nd check; retrying later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish does not exist at the 83rd check; retrying later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.cli.25518.out cli tso query --pd=http://127.0.0.1:2379
table ddl_attributes.attributes_t1_new does not exist at the 3rd check; retrying later
valid ~~~ running cdc
Failed to start cdc; the usage tips should be printed
1st test case cdc_server_tips succeeded!
Trying an INVALID cdc server command
[Sun May  5 11:30:07 CST 2024] <<<<<< START cdc server in cdc_server_tips case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ true != \n\o ]]
+ set +x
+ GO_FAILPOINTS=
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cdc_server_tips.2593625938.out server --log-file /tmp/tidb_cdc_test/cdc_server_tips/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data --cluster-id default --pd None
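The cdc_server_tips case above deliberately starts the server with an unusable --pd value (None) and then verifies that the process fails and prints usage tips instead of hanging. A hedged sketch of that style of check; the grep pattern, timing, and log path are illustrative, not the test's actual assertions:

# Sketch: start cdc with an invalid --pd value and expect a quick failure
# whose log mentions the usage tips.
LOG=/tmp/tidb_cdc_test/cdc_server_tips/cdc.log
cdc.test server --pd None --log-file "$LOG" \
    --data-dir /tmp/tidb_cdc_test/cdc_server_tips/cdc_data &
pid=$!
sleep 3
if kill -0 "$pid" 2>/dev/null; then
    echo "cdc unexpectedly kept running with an invalid --pd" >&2
    kill "$pid"
    exit 1
fi
grep -qi "usage" "$LOG" || echo "expected usage tips in $LOG" >&2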
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34f99c0019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:17660, start at 2024-05-05 11:30:07.116913714 +0800 CST m=+5.163223398	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:07.123 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:07.129 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:07.129 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34f99c0019	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:17660, start at 2024-05-05 11:30:07.116913714 +0800 CST m=+5.163223398	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:07.123 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:07.129 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:07.129 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b34fb100010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:17731, start at 2024-05-05 11:30:07.187971826 +0800 CST m=+5.184194512	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:07.194 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:07.172 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:07.172 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
+ set +x
+ tso='449545452642893825
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545452642893825 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
[Sun May  5 11:30:10 CST 2024] <<<<<< START cdc server in split_region case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ GO_FAILPOINTS=
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.2556325565.out server --log-file /tmp/tidb_cdc_test/split_region/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/split_region/cdc_data --cluster-id default
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish does not exist at the 84th check; retrying later
table ddl_attributes.attributes_t1_new exists
table ddl_attributes.finish_mark does not exist at the 1st check; retrying later
+ cd /tmp/tidb_cdc_test/synced_status
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19139.out cli tso query --pd=http://127.0.0.1:2379
table test.finish does not exist at the 85th check; retrying later
table ddl_attributes.finish_mark does not exist at the 2nd check; retrying later
+ set +x
+ tso='449545453588709377
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545453588709377 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545453588709377
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status --binary cdc.test
[Sun May  5 11:30:14 CST 2024] <<<<<< START cdc server in synced_status case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.1917919181.out server --log-file /tmp/tidb_cdc_test/synced_status/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:30:13 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e63cd2de-9452-4e3b-917d-8cdd411673b8
	{"id":"e63cd2de-9452-4e3b-917d-8cdd411673b8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879811}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd1042f3
	e63cd2de-9452-4e3b-917d-8cdd411673b8

/tidb/cdc/default/default/upstream/7365352647684945719
	{"id":7365352647684945719,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e63cd2de-9452-4e3b-917d-8cdd411673b8
	{"id":"e63cd2de-9452-4e3b-917d-8cdd411673b8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879811}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd1042f3
	e63cd2de-9452-4e3b-917d-8cdd411673b8

/tidb/cdc/default/default/upstream/7365352647684945719
	{"id":7365352647684945719,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e63cd2de-9452-4e3b-917d-8cdd411673b8
	{"id":"e63cd2de-9452-4e3b-917d-8cdd411673b8","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879811}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd1042f3
	e63cd2de-9452-4e3b-917d-8cdd411673b8

/tidb/cdc/default/default/upstream/7365352647684945719
	{"id":7365352647684945719,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.split_region.cli.25627.out cli changefeed create --start-ts=449545452642893825 '--sink-uri=kafka://127.0.0.1:9092/ticdc-split-region-test-2596?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --config /tmp/tidb_cdc_test/split_region/pulsar_test.toml
Create changefeed successfully!
ID: e82a2e71-fd51-4136-baf0-3d5173cd5500
Info: {"upstream_id":7365352647684945719,"namespace":"default","id":"e82a2e71-fd51-4136-baf0-3d5173cd5500","sink_uri":"kafka://127.0.0.1:9092/ticdc-split-region-test-2596?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:30:14.21240679+08:00","start_ts":449545452642893825,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545452642893825,"checkpoint_ts":449545452642893825,"checkpoint_time":"2024-05-05 11:30:08.971"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
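The changefeed created above can be reproduced outside the harness with a short script. A minimal sketch, assuming the plain cdc binary instead of the coverage-instrumented cdc.test used by the trace; the topic name and start-ts are placeholders copied from the trace:

    # Sketch of the Kafka changefeed creation traced above (open-protocol sink).
    # TOPIC and START_TS are placeholders; the harness derives them per run.
    KAFKA_ADDR="127.0.0.1:9092"
    TOPIC="ticdc-split-region-test"
    START_TS="449545452642893825"
    SINK_URI="kafka://${KAFKA_ADDR}/${TOPIC}?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760"

    cdc cli changefeed create \
        --start-ts="${START_TS}" \
        --sink-uri="${SINK_URI}" \
        --config="/tmp/tidb_cdc_test/split_region/pulsar_test.toml"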
+ run_case_with_unavailable_tikv conf/changefeed-redo.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
+ mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
table test.finish not exists for 86-th check, retry later
+ set +x
[Sun May  5 11:30:15 CST 2024] <<<<<< START kafka consumer in split_region case >>>>>>
table split_region.test1 not exists for 1-th check, retry later
table ddl_attributes.finish_mark not exists for 3-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:30:17 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e53d4904-6e39-4a1c-860e-9a11d4165bbf
	{"id":"e53d4904-6e39-4a1c-860e-9a11d4165bbf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879814}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd126fd6
	e53d4904-6e39-4a1c-860e-9a11d4165bbf

/tidb/cdc/default/default/upstream/7365352646737440611
	{"id":7365352646737440611,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e53d4904-6e39-4a1c-860e-9a11d4165bbf
	{"id":"e53d4904-6e39-4a1c-860e-9a11d4165bbf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879814}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd126fd6
	e53d4904-6e39-4a1c-860e-9a11d4165bbf

/tidb/cdc/default/default/upstream/7365352646737440611
	{"id":7365352646737440611,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/e53d4904-6e39-4a1c-860e-9a11d4165bbf
	{"id":"e53d4904-6e39-4a1c-860e-9a11d4165bbf","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879814}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd126fd6
	e53d4904-6e39-4a1c-860e-9a11d4165bbf

/tidb/cdc/default/default/upstream/7365352646737440611
	{"id":7365352646737440611,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545453588709377 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.19235.out cli changefeed create --start-ts=449545453588709377 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 87-th check, retry later
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365352646737440611,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:30:17.572179759+08:00","start_ts":449545453588709377,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545453588709377,"checkpoint_ts":449545453588709377,"checkpoint_time":"2024-05-05 11:30:12.579"}
PASS
table ddl_attributes.finish_mark not exists for 4-th check, retry later
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table split_region.test1 not exists for 2-th check, retry later
+ set +x
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   221  100   221    0     0   5736      0 --:--:-- --:--:-- --:--:--  5815
+ synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-05 11:30:12.579","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-05 11:30:19.000","info":"Data syncing is finished"}'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:30:12.579","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:30:19.000","info":"Data' syncing is 'finished"}'
++ jq .synced
+ status=true
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:30:12.579","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:30:19.000","info":"Data' syncing is 'finished"}'
++ jq -r .sink_checkpoint_ts
+ sink_checkpoint_ts='2024-05-05 11:30:12.579'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:30:12.579","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:30:19.000","info":"Data' syncing is 'finished"}'
++ jq -r .puller_resolved_ts
+ puller_resolved_ts='1970-01-01 08:00:00.000'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:30:12.579","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:30:19.000","info":"Data' syncing is 'finished"}'
++ jq -r .last_synced_ts
+ last_synced_ts='1970-01-01 08:00:00.000'
+ '[' true '!=' true ']'
+ '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']'
+ '[' '1970-01-01 08:00:00.000' '!=' '1970-01-01 08:00:00.000' ']'
++ date '+%Y-%m-%d %H:%M:%S'
+ current='2024-05-05 11:30:19'
+ echo 'sink_checkpoint_ts is 2024-05-05' 11:30:12.579
sink_checkpoint_ts is 2024-05-05 11:30:12.579
++ date -d '2024-05-05 11:30:12.579' +%s
+ checkpoint_timestamp=1714879812
++ date -d '2024-05-05 11:30:19' +%s
+ current_timestamp=1714879819
+ '[' 7 -gt 300 ']'
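The block above is the harness polling the synced-status API and asserting on the result. A minimal sketch of the same assertion, assuming the /api/v2 endpoint and jq usage shown in the trace, with the 300-second checkpoint-lag budget the final comparison uses:

    # Sketch of the synced-status assertion traced above.
    API="http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced"
    synced_status=$(curl -s -X GET "$API")
    status=$(echo "$synced_status" | jq -r '.synced')
    sink_checkpoint_ts=$(echo "$synced_status" | jq -r '.sink_checkpoint_ts')

    if [ "$status" != "true" ]; then
        echo "changefeed not synced yet: $synced_status"
        exit 1
    fi

    # The sink checkpoint must not lag the wall clock by more than 300 seconds.
    checkpoint_timestamp=$(date -d "$sink_checkpoint_ts" +%s)
    current_timestamp=$(date +%s)
    lag=$((current_timestamp - checkpoint_timestamp))
    if [ "$lag" -gt 300 ]; then
        echo "sink checkpoint lags by ${lag}s, over the 300s budget"
        exit 1
    fi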
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
table test.finish not exists for 88-th check, retry later
table ddl_attributes.finish_mark not exists for 5-th check, retry later
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
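check_table_exists above retries until the table shows up downstream. A rough sketch of such a poll, assuming a mysql client on PATH and the root/no-password downstream used by the sink URIs in this trace:

    # Sketch of a table-existence poll like check_table_exists in the trace.
    check_table_exists() {
        local table=$1 host=$2 port=$3 retries=${4:-60}
        for ((i = 1; i <= retries; i++)); do
            if mysql -h"$host" -P"$port" -uroot -e "DESC ${table}" >/dev/null 2>&1; then
                return 0
            fi
            echo "table ${table} not exists for ${i}-th check, retry later"
            sleep 2
        done
        return 1
    }

    check_table_exists test.t1 127.0.0.1 3306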
table split_region.test1 exists
table split_region.test2 exists
check diff failed 1-th time, retry later
table test.finish not exists for 89-th check, retry later
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check/run.sh: line 1: 19066 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check_avro/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
check diff successfully
table ddl_attributes.finish_mark not exists for 6-th check, retry later
table test.t1 exists
+ sleep 5
start tidb cluster in /tmp/tidb_cdc_test/kafka_simple_claim_check_avro
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 90-th check, retry later
table ddl_attributes.finish_mark not exists for 7-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 91-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_attributes.finish_mark not exists for 8-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   3530      0 --:--:-- --:--:-- --:--:--  3573
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:30:25.980","puller_resolved_ts":"2024-05-05 11:30:19.179","last_synced_ts":"2024-05-05 11:30:19.679","now_ts":"2024-05-05 11:30:26.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:25.980","puller_resolved_ts":"2024-05-05' '11:30:19.179","last_synced_ts":"2024-05-05' '11:30:19.679","now_ts":"2024-05-05' '11:30:26.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:25.980","puller_resolved_ts":"2024-05-05' '11:30:19.179","last_synced_ts":"2024-05-05' '11:30:19.679","now_ts":"2024-05-05' '11:30:26.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
table test.finish not exists for 92-th check, retry later
invalid ~~~ running cdc  
Failed to start cdc, the usage tips should be printed
 2nd test case cdc_server_tips success! 
[Sun May  5 11:30:27 CST 2024] <<<<<< run all test cases cdc_server_tips success! >>>>>> 
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table ddl_attributes.finish_mark not exists for 9-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 93-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3646400004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:13584, start at 2024-05-05 11:30:28.37110848 +0800 CST m=+5.064427666	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:28.379 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:28.368 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:28.368 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3646400004	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:13584, start at 2024-05-05 11:30:28.37110848 +0800 CST m=+5.064427666	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:28.379 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:28.368 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:28.368 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b364884000e	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:13677, start at 2024-05-05 11:30:28.527170935 +0800 CST m=+5.159986729	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:28.536 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:28.513 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:28.513 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log
arg matches is ArgMatches { args: {"config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table ddl_attributes.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 94-th check, retry later
wait process cdc.test exit for 3-th time...
+ cd /tmp/tidb_cdc_test/synced_status_with_redo
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15060.out cli tso query --pd=http://127.0.0.1:2379
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
[Sun May  5 11:30:31 CST 2024] <<<<<< run test case ddl_attributes success! >>>>>>
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3686e0000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:20130, start at 2024-05-05 11:30:32.516369393 +0800 CST m=+5.626027315	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:32.522 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:32.504 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:32.504 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ tso='449545458658050049
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545458658050049 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545458658050049
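The lines above show how start_ts is derived: the coverage-instrumented cdc.test binary prints the TSO followed by PASS and a coverage line, and only the first field is kept. A minimal sketch of the same extraction:

    # Sketch: extract the TSO (first whitespace-separated field) from the
    # multi-line `cli tso query` output.
    tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
    start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')   # unquoted echo flattens the lines
    echo "start_ts=${start_ts}"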
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test
[Sun May  5 11:30:33 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1509915101.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
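The loop above is the standard readiness check for a freshly started CDC server: poll /debug/info with basic auth until the response mentions "etcd info", giving up after 50 attempts. A minimal sketch of that loop:

    # Sketch of the readiness poll traced above (basic auth ticdc:ticdc_secret,
    # up to 50 attempts, 3 s apart).
    wait_cdc_ready() {
        local url="http://127.0.0.1:8300/debug/info"
        local res
        for i in $(seq 0 50); do
            res=$(curl -vsL --max-time 20 "$url" --user ticdc:ticdc_secret 2>/dev/null || true)
            if echo "$res" | grep -q 'failed to get info:'; then
                echo "cdc server returned an error" >&2
                return 1
            fi
            if echo "$res" | grep -q 'etcd info'; then
                return 0    # capture registered in etcd, server is ready
            fi
            sleep 3
        done
        echo "cdc server not ready after 50 attempts" >&2
        return 1
    }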
table test.finish not exists for 95-th check, retry later
check diff failed 1-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3686e0000c	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:20130, start at 2024-05-05 11:30:32.516369393 +0800 CST m=+5.626027315	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:32.522 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:32.504 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:32.504 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3680900003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:20218, start at 2024-05-05 11:30:32.101572983 +0800 CST m=+5.157999076	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:32.107 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:32.100 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:32.100 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/kafka_simple_claim_check_avro/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/kafka_simple_claim_check_avro/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check_avro/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check_avro/tiflash/log/proxy.log"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/kafka_simple_claim_check_avro/tiflash/db/proxy"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 96-th check, retry later
check diff failed 2-th time, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:30:36 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd3af456-244b-4d6a-b04c-cbd94f5c3465
	{"id":"bd3af456-244b-4d6a-b04c-cbd94f5c3465","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879833}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd65fdcf
	bd3af456-244b-4d6a-b04c-cbd94f5c3465

/tidb/cdc/default/default/upstream/7365352746008899722
	{"id":7365352746008899722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd3af456-244b-4d6a-b04c-cbd94f5c3465
	{"id":"bd3af456-244b-4d6a-b04c-cbd94f5c3465","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879833}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd65fdcf
	bd3af456-244b-4d6a-b04c-cbd94f5c3465

/tidb/cdc/default/default/upstream/7365352746008899722
	{"id":7365352746008899722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/bd3af456-244b-4d6a-b04c-cbd94f5c3465
	{"id":"bd3af456-244b-4d6a-b04c-cbd94f5c3465","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879833}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd65fdcf
	bd3af456-244b-4d6a-b04c-cbd94f5c3465

/tidb/cdc/default/default/upstream/7365352746008899722
	{"id":7365352746008899722,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed-redo.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545458658050049 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.15160.out cli changefeed create --start-ts=449545458658050049 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365352746008899722,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:30:36.893102937+08:00","start_ts":449545458658050049,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545458658050049,"checkpoint_ts":449545458658050049,"checkpoint_time":"2024-05-05 11:30:31.917"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
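The changefeed above is created from conf/changefeed-redo.toml; its Info JSON shows eventual-consistency redo with file-based storage. A rough reconstruction of such a config file, with key names assumed from the TiCDC changefeed config format and values copied from the Info JSON above:

    # Assumed reconstruction of conf/changefeed-redo.toml (key names follow the
    # TiCDC changefeed config format; values are taken from the Info JSON above).
    printf '%s\n' \
        '[consistent]' \
        'level = "eventual"' \
        'storage = "file:///tmp/tidb_cdc_test/synced_status/redo"' \
        > changefeed-redo.toml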
[Sun May  5 11:30:37 CST 2024] <<<<<< START cdc server in kafka_simple_claim_check_avro case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.2166621668.out server --log-file /tmp/tidb_cdc_test/kafka_simple_claim_check_avro/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/kafka_simple_claim_check_avro/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 97-th check, retry later
check diff successfully
+ set +x
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
wait process cdc.test exit for 1-th time...
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
wait process cdc.test exit for 2-th time...
table test.finish not exists for 98-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:30:39 CST 2024] <<<<<< run test case split_region success! >>>>>>
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table test.t1 exists
+ sleep 5
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:30:40 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8477f658-2578-418b-b82b-7d41aa0da8dd
	{"id":"8477f658-2578-418b-b82b-7d41aa0da8dd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879837}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd79dbcc
	8477f658-2578-418b-b82b-7d41aa0da8dd

/tidb/cdc/default/default/upstream/7365352760229870257
	{"id":7365352760229870257,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8477f658-2578-418b-b82b-7d41aa0da8dd
	{"id":"8477f658-2578-418b-b82b-7d41aa0da8dd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879837}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd79dbcc
	8477f658-2578-418b-b82b-7d41aa0da8dd

/tidb/cdc/default/default/upstream/7365352760229870257
	{"id":7365352760229870257,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/8477f658-2578-418b-b82b-7d41aa0da8dd
	{"id":"8477f658-2578-418b-b82b-7d41aa0da8dd","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879837}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cd79dbcc
	8477f658-2578-418b-b82b-7d41aa0da8dd

/tidb/cdc/default/default/upstream/7365352760229870257
	{"id":7365352760229870257,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.cli.21719.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 99-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/event_filter
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ set +x
+ tso='449545460922449927
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545460922449927 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.cli.21757.out cli changefeed create --start-ts=449545460922449927 '--sink-uri=kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple&encoding-format=avro' -c kafka-simple-claim-check-avro --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check_avro/conf/changefeed.toml
Create changefeed successfully!
ID: kafka-simple-claim-check-avro
Info: {"upstream_id":7365352760229870257,"namespace":"default","id":"kafka-simple-claim-check-avro","sink_uri":"kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple\u0026encoding-format=avro","create_time":"2024-05-05T11:30:42.472501991+08:00","start_ts":449545460922449927,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"claim-check","large_message_handle_compression":"snappy","claim_check_storage_uri":"file:///tmp/kafka-simple-avro-claim-check"}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545460922449927,"checkpoint_ts":449545460922449927,"checkpoint_time":"2024-05-05 11:30:40.555"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
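Once created, the changefeed's state can be double-checked from the CLI. A hedged sketch, assuming `cdc cli changefeed query` prints the changefeed info as JSON (as the create output above suggests):

    # Sketch: confirm the new changefeed is in the "normal" state.
    state=$(cdc cli changefeed query -c kafka-simple-claim-check-avro --pd=http://127.0.0.1:2379 | jq -r '.state')
    if [ "$state" != "normal" ]; then
        echo "unexpected changefeed state: ${state}" >&2
        exit 1
    fi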
table test.finish not exists for 100-th check, retry later
11:30AM INF > Run case=sql/debezium/connector_test_ro.sql
Verifying downstream PD is started...
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
+ kill_tikv
++ ps aux
++ grep tikv-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
+ info='jenkins    12886 30.4  0.5 4727336 2272000 ?     Sl   11:30   0:07 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20160 --status-addr 127.0.0.1:20181 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv1.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv1
jenkins    12887 23.4  0.5 4690984 2202700 ?     Sl   11:30   0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20161 --status-addr 127.0.0.1:20182 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv2.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv2
jenkins    12888 23.7  0.5 4695588 2220328 ?     Sl   11:30   0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20162 --status-addr 127.0.0.1:20183 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv3.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv3
jenkins    12895 30.4  0.5 4716580 2264276 ?     Sl   11:30   0:07 tikv-server --pd 127.0.0.1:2479 -A 127.0.0.1:21160 --status-addr 127.0.0.1:21180 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down.log --log-level debug -C /tmp/tidb_cdc_test/synced_status_with_redo/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status_with_redo/tikv_down'
++ ps aux
++ grep tikv-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
++ awk '{print $2}'
++ xargs kill -9
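The trace above is the fault-injection step of run_case_with_unavailable_tikv: every tikv-server belonging to this case's workdir is located with ps/grep and killed with SIGKILL. A minimal sketch of the same step:

    # Sketch of the kill_tikv step traced above.
    kill_tikv() {
        local workdir="/tmp/tidb_cdc_test/synced_status_with_redo"
        ps aux | grep tikv-server | grep "$workdir" | awk '{print $2}' | xargs kill -9
    }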
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   3129      0 --:--:-- --:--:-- --:--:--  3115
100   243  100   243    0     0   3126      0 --:--:-- --:--:-- --:--:--  3115
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:30:44.018","puller_resolved_ts":"2024-05-05 11:30:38.367","last_synced_ts":"2024-05-05 11:30:38.417","now_ts":"2024-05-05 11:30:45.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:44.018","puller_resolved_ts":"2024-05-05' '11:30:38.367","last_synced_ts":"2024-05-05' '11:30:38.417","now_ts":"2024-05-05' '11:30:45.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:44.018","puller_resolved_ts":"2024-05-05' '11:30:38.367","last_synced_ts":"2024-05-05' '11:30:38.417","now_ts":"2024-05-05' '11:30:45.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ target_message='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
table test.finish not exists for 101-th check, retry later
+ set +x
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 102-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 103-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.cli.21825.out cli changefeed pause -c kafka-simple-claim-check-avro
PASS
coverage: 2.0% of statements in github.com/pingcap/tiflow/...
table test.finish not exists for 104-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.cli.21861.out cli changefeed update -c kafka-simple-claim-check-avro '--sink-uri=kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple&encoding-format=avro&max-message-bytes=2048' --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check_avro/conf/changefeed.toml --no-confirm
Diff of changefeed config:
{Type:update Path:[SinkURI] From:kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple&encoding-format=avro To:kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple&encoding-format=avro&max-message-bytes=2048}
{Type:update Path:[Config SyncPointInterval] From:<nil> To:0xc0013712b8}
{Type:update Path:[Config SyncPointRetention] From:<nil> To:0xc0013712c8}
{Type:update Path:[Config Consistent] From:<nil> To:0xc001332620}
Update changefeed config successfully! 
ID: kafka-simple-claim-check-avro
Info: {"upstream_id":7365352760229870257,"namespace":"default","id":"kafka-simple-claim-check-avro","sink_uri":"kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple\u0026encoding-format=avro\u0026max-message-bytes=2048","create_time":"2024-05-05T11:30:42.472501991+08:00","start_ts":449545460922449927,"admin_job_type":1,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_table_monitor":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"simple","encoder_concurrency":32,"terminator":"\r\n","enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"kafka_config":{"large_message_handle":{"large_message_handle_option":"claim-check","large_message_handle_compression":"snappy","claim_check_storage_uri":"file:///tmp/kafka-simple-avro-claim-check"}},"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"stopped","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":0,"checkpoint_ts":449545463504568323,"checkpoint_time":"2024-05-05 11:30:50.405"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
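The update above shrinks max-message-bytes to 2048 so that large rows must take the claim-check path. A minimal sketch of the pause-then-update sequence, with the resume step assumed to follow outside this excerpt:

    # Sketch of the pause/update sequence traced above.
    CF="kafka-simple-claim-check-avro"
    NEW_URI="kafka://127.0.0.1:9092/kafka-simple-claim-check-avro-2419?protocol=simple&encoding-format=avro&max-message-bytes=2048"

    cdc cli changefeed pause -c "$CF"
    cdc cli changefeed update -c "$CF" \
        --sink-uri="$NEW_URI" \
        --config="conf/changefeed.toml" \
        --no-confirm
    cdc cli changefeed resume -c "$CF"   # assumed follow-up, not shown in this excerpt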
table test.finish not exists for 105-th check, retry later
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b37caa00013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:26987, start at 2024-05-05 11:30:53.243039717 +0800 CST m=+5.094430119	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:53.249 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:53.224 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:53.224 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b37caa00013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:26987, start at 2024-05-05 11:30:53.243039717 +0800 CST m=+5.094430119	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:53.249 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:53.224 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:53.224 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b37cc180013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:27060, start at 2024-05-05 11:30:53.343641886 +0800 CST m=+5.145850009	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:32:53.350 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:30:53.318 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:20:53.318 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/event_filter/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/event_filter/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/event_filter/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.kafka_simple_claim_check_avro.cli.21898.out cli changefeed resume -c kafka-simple-claim-check-avro
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
PASS
table test.finish not exists for 106-th check, retry later
coverage: 2.1% of statements in github.com/pingcap/tiflow/...
+ set +x
table test.finish_mark not exists for 1-th check, retry later
[Sun May  5 11:30:57 CST 2024] <<<<<< START cdc server in event_filter case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.2851128513.out server --log-file /tmp/tidb_cdc_test/event_filter/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/event_filter/cdc_data --cluster-id default --addr 127.0.0.1:8300 --pd http://127.0.0.1:2379
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 107-th check, retry later
table test.finish not exists for 108-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:31:00 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0ddde927-117d-46cb-ae73-586d8fe0d258
	{"id":"0ddde927-117d-46cb-ae73-586d8fe0d258","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879857}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cdc6e3cd
	0ddde927-117d-46cb-ae73-586d8fe0d258

/tidb/cdc/default/default/upstream/7365352842411844564
	{"id":7365352842411844564,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0ddde927-117d-46cb-ae73-586d8fe0d258
	{"id":"0ddde927-117d-46cb-ae73-586d8fe0d258","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879857}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cdc6e3cd
	0ddde927-117d-46cb-ae73-586d8fe0d258

/tidb/cdc/default/default/upstream/7365352842411844564
	{"id":7365352842411844564,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0ddde927-117d-46cb-ae73-586d8fe0d258
	{"id":"0ddde927-117d-46cb-ae73-586d8fe0d258","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879857}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cdc6e3cd
	0ddde927-117d-46cb-ae73-586d8fe0d258

/tidb/cdc/default/default/upstream/7365352842411844564
	{"id":7365352842411844564,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
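[editor note] The +x trace above is the harness polling the capture's /debug/info endpoint until the server serves its etcd info. A minimal bash sketch of that readiness check, using the same endpoint, credentials, grep strings, retry budget, and back-off seen in the trace:
# Poll the CDC HTTP endpoint until it exposes "etcd info", as the harness does above.
for i in $(seq 1 50); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server returned an error, retrying"    # transient error string the harness checks for
    elif echo "$res" | grep -q 'etcd info'; then
        echo "cdc server is up"; break                    # owner/processors/etcd sections are being served
    fi
    sleep 3                                               # same back-off as in the trace
done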
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.event_filter.cli.28575.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-event-filter-19324?protocol=open-protocol&partition-num=4&kafka-version=2.4.1&max-message-bytes=10485760' --server=127.0.0.1:8300 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/event_filter/conf/cf.toml
Create changefeed successfully!
ID: c7357908-a3cd-42af-ab86-a02acc860436
Info: {"upstream_id":7365352842411844564,"namespace":"default","id":"c7357908-a3cd-42af-ab86-a02acc860436","sink_uri":"kafka://127.0.0.1:9092/ticdc-event-filter-19324?protocol=open-protocol\u0026partition-num=4\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:31:00.666869047+08:00","start_ts":449545466157203460,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["event_filter.*"],"event_filters":[{"matcher":["event_filter.t1"],"ignore_event":["drop table","delete"],"ignore_sql":null,"ignore_insert_value_expr":"id = 2 or city = 'tokyo'","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_truncate"],"ignore_event":["truncate table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""},{"matcher":["event_filter.t_alter"],"ignore_event":["alter table"],"ignore_sql":null,"ignore_insert_value_expr":"","ignore_update_new_value_expr":"","ignore_update_old_value_expr":"","ignore_delete_value_expr":""}]},"mounter":{"worker_num":16},"sink":{"protocol":"open-protocol","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545466157203460,"checkpoint_ts":449545466157203460,"checkpoint_time":"2024-05-05 11:31:00.524"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
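[editor note] The changefeed above loads tests/integration_tests/event_filter/conf/cf.toml. The file itself is not printed in this log; the bash heredoc below is a rough reconstruction of it from the "filter" section of the Info JSON, using TiCDC's changefeed-config key names (matcher, ignore-event, ignore-insert-value-expr). Treat it as an approximation, not the checked-in file.
# Reconstruction (assumption, derived from the Info JSON above) of the event filter rules.
cat > cf.toml <<'EOF'
[filter]
rules = ['event_filter.*']

[[filter.event-filters]]
matcher = ["event_filter.t1"]
ignore-event = ["drop table", "delete"]
ignore-insert-value-expr = "id = 2 or city = 'tokyo'"

[[filter.event-filters]]
matcher = ["event_filter.t_truncate"]
ignore-event = ["truncate table"]

[[filter.event-filters]]
matcher = ["event_filter.t_alter"]
ignore-event = ["alter table"]
EOF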
table test.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
table test.finish not exists for 109-th check, retry later
+ set +x
[Sun May  5 11:31:02 CST 2024] <<<<<< START kafka consumer in event_filter case >>>>>>
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:31:02 CST 2024] <<<<<< run test case kafka_simple_claim_check_avro success! >>>>>>
table test.finish not exists for 110-th check, retry later
table event_filter.t1 does not exists
table event_filter.t1 exists
table event_filter.t_normal not exists for 1-th check, retry later
table test.finish not exists for 111-th check, retry later
table event_filter.t_normal exists
table event_filter.t_truncate not exists for 1-th check, retry later
table test.finish not exists for 112-th check, retry later
table test.finish not exists for 113-th check, retry later
table event_filter.t_truncate exists
table event_filter.t_alter not exists for 1-th check, retry later
table event_filter.t_alter exists
table test.finish not exists for 114-th check, retry later
table event_filter.finish_mark exists
check diff failed 1-th time, retry later
table test.finish not exists for 115-th check, retry later
check diff failed 2-th time, retry later
table test.finish not exists for 116-th check, retry later
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:31:18 CST 2024] <<<<<< run test case event_filter success! >>>>>>
table test.finish not exists for 117-th check, retry later
table test.finish not exists for 118-th check, retry later
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/kafka_simple_claim_check_avro/run.sh: line 1: 21930 Killed                  cdc_kafka_consumer --upstream-uri $SINK_URI --downstream-uri="mysql://root@127.0.0.1:3306/?safe-mode=true&batch-dml-enable=false" --upstream-tidb-dsn="root@tcp(${UP_TIDB_HOST}:${UP_TIDB_PORT})/?" --config="$CUR/conf/changefeed.toml" 2>&1
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/canal_json_adapter_compatibility/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
start tidb cluster in /tmp/tidb_cdc_test/canal_json_adapter_compatibility
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
11:31AM INF > Run case=sql/debezium/datetime_key_test.sql
table test.finish not exists for 119-th check, retry later
Verifying downstream PD is started...
table test.finish not exists for 120-th check, retry later
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 121-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 122-th check, retry later
11:31AM INF > Run case=sql/debezium/db_default_charset.sql
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 123-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh using Sink-Type: kafka... <<=================
+++ dirname /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/run.sh
++ cd /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
++ pwd
+ CUR=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode
+ source /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/sql_mode/../_utils/test_prepare
++ UP_TIDB_HOST=127.0.0.1
++ UP_TIDB_PORT=4000
++ UP_TIDB_OTHER_PORT=4001
++ UP_TIDB_STATUS=10080
++ UP_TIDB_OTHER_STATUS=10081
++ DOWN_TIDB_HOST=127.0.0.1
++ DOWN_TIDB_PORT=3306
++ DOWN_TIDB_STATUS=20080
++ TLS_TIDB_HOST=127.0.0.1
++ TLS_TIDB_PORT=3307
++ TLS_TIDB_STATUS=30080
++ UP_PD_HOST_1=127.0.0.1
++ UP_PD_PORT_1=2379
++ UP_PD_PEER_PORT_1=2380
++ UP_PD_HOST_2=127.0.0.1
++ UP_PD_PORT_2=2679
++ UP_PD_PEER_PORT_2=2680
++ UP_PD_HOST_3=127.0.0.1
++ UP_PD_PORT_3=2779
++ UP_PD_PEER_PORT_3=2780
++ DOWN_PD_HOST=127.0.0.1
++ DOWN_PD_PORT=2479
++ DOWN_PD_PEER_PORT=2480
++ TLS_PD_HOST=127.0.0.1
++ TLS_PD_PORT=2579
++ TLS_PD_PEER_PORT=2580
++ UP_TIKV_HOST_1=127.0.0.1
++ UP_TIKV_PORT_1=20160
++ UP_TIKV_STATUS_PORT_1=20181
++ UP_TIKV_HOST_2=127.0.0.1
++ UP_TIKV_PORT_2=20161
++ UP_TIKV_STATUS_PORT_2=20182
++ UP_TIKV_HOST_3=127.0.0.1
++ UP_TIKV_PORT_3=20162
++ UP_TIKV_STATUS_PORT_3=20183
++ DOWN_TIKV_HOST=127.0.0.1
++ DOWN_TIKV_PORT=21160
++ DOWN_TIKV_STATUS_PORT=21180
++ TLS_TIKV_HOST=127.0.0.1
++ TLS_TIKV_PORT=22160
++ TLS_TIKV_STATUS_PORT=22180
+++ cat /tmp/tidb_cdc_test/KAFKA_VERSION
+++ echo 2.4.1
++ KAFKA_VERSION=2.4.1
+ WORK_DIR=/tmp/tidb_cdc_test/sql_mode
+ CDC_BINARY=cdc.test
+ SINK_TYPE=kafka
+ CDC_COUNT=3
+ DB_COUNT=4
+ rm -rf /tmp/tidb_cdc_test/sql_mode
+ mkdir -p /tmp/tidb_cdc_test/sql_mode
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/sql_mode
The 1 times to try to start tidb cluster...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 124-th check, retry later
11:31AM INF > Run case=sql/debezium/db_default_charset_noutf.sql
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3a39ec001f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:23028, start at 2024-05-05 11:31:33.15295888 +0800 CST m=+5.153938732	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:33.161 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:33.165 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:33.165 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3a39ec001f	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:23028, start at 2024-05-05 11:31:33.15295888 +0800 CST m=+5.153938732	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:33.161 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:33.165 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:33.165 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3a39f00014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:23108, start at 2024-05-05 11:31:33.138991779 +0800 CST m=+5.084635200	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:33.145 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:33.116 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:33.116 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/canal_json_adapter_compatibility/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/canal_json_adapter_compatibility/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/canal_json_adapter_compatibility/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/canal_json_adapter_compatibility/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/canal_json_adapter_compatibility/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 125-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/sql_mode
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_adapter_compatibility.cli.24558.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 126-th check, retry later
+ set +x
+ tso='449545475618242561
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545475618242561 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
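[editor note] The trace above shows how the harness derives a changefeed start-ts: it queries PD for the current TSO through the cdc CLI, then keeps only the first field, discarding the PASS/coverage lines appended by the instrumented cdc.test binary. A condensed bash sketch of that step, with the same flags as in the trace:
# Query a TSO from PD and keep only the timestamp, mirroring the harness.
tso_out=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_out | awk -F ' ' '{print $1}')   # unquoted echo joins the lines, as the trace does
echo "start-ts=$start_ts"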
[Sun May  5 11:31:38 CST 2024] <<<<<< START cdc server in canal_json_adapter_compatibility case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_adapter_compatibility.2459324595.out server --log-file /tmp/tidb_cdc_test/canal_json_adapter_compatibility/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/canal_json_adapter_compatibility/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
11:31AM INF > Run case=sql/debezium/decimal_column_test.sql
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 127-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 128-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:31:41 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/268c7be7-faa9-4195-b024-369276fa24f6
	{"id":"268c7be7-faa9-4195-b024-369276fa24f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879898}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce6284d2
	268c7be7-faa9-4195-b024-369276fa24f6

/tidb/cdc/default/default/upstream/7365353010653984405
	{"id":7365353010653984405,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/268c7be7-faa9-4195-b024-369276fa24f6
	{"id":"268c7be7-faa9-4195-b024-369276fa24f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879898}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce6284d2
	268c7be7-faa9-4195-b024-369276fa24f6

/tidb/cdc/default/default/upstream/7365353010653984405
	{"id":7365353010653984405,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/268c7be7-faa9-4195-b024-369276fa24f6
	{"id":"268c7be7-faa9-4195-b024-369276fa24f6","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879898}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce6284d2
	268c7be7-faa9-4195-b024-369276fa24f6

/tidb/cdc/default/default/upstream/7365353010653984405
	{"id":7365353010653984405,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_adapter_compatibility.cli.24648.out cli changefeed create --start-ts=449545475618242561 '--sink-uri=kafka://127.0.0.1:9092/test?protocol=canal-json&kafka-version=2.4.1&max-message-bytes=10485760'
Create changefeed successfully!
ID: 868c8d8d-b1c0-462b-aef2-150436c79025
Info: {"upstream_id":7365353010653984405,"namespace":"default","id":"868c8d8d-b1c0-462b-aef2-150436c79025","sink_uri":"kafka://127.0.0.1:9092/test?protocol=canal-json\u0026kafka-version=2.4.1\u0026max-message-bytes=10485760","create_time":"2024-05-05T11:31:41.603486507+08:00","start_ts":449545475618242561,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545475618242561,"checkpoint_ts":449545475618242561,"checkpoint_time":"2024-05-05 11:31:36.615"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 129-th check, retry later
+ set +x
11:31AM INF > Run case=sql/debezium/enum_column_test.sql
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 130-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af8780008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29980, start at 2024-05-05 11:31:45.317843253 +0800 CST m=+5.092992443	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.323 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.310 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.310 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/sql_mode/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/sql_mode/tiflash/log/error.log
arg matches is ArgMatches { args: {"data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/db/proxy"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash-proxy.toml"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/sql_mode/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 131-th check, retry later
+ trap stop_tidb_cluster EXIT
+ run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 4000
+ run_sql 'set global sql_mode='\''NO_BACKSLASH_ESCAPES'\'';' 127.0.0.1 3306
+ cd /tmp/tidb_cdc_test/sql_mode
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.31432.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 132-th check, retry later
+ set +x
+ tso='449545478819807233
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545478819807233 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545478819807233
+ run_cdc_server --workdir /tmp/tidb_cdc_test/sql_mode --binary cdc.test
[Sun May  5 11:31:50 CST 2024] <<<<<< START cdc server in sql_mode case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.3147331475.out server --log-file /tmp/tidb_cdc_test/sql_mode/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sql_mode/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 133-th check, retry later
11:31AM INF > Run case=sql/debezium/multitable_dbz_871.sql
table test.finish not exists for 134-th check, retry later
11:31AM INF > Run case=sql/debezium/mysql_dbz_6533.sql
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:31:53 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545478819807233 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.31526.out cli changefeed create --start-ts=449545478819807233 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365353076553067605,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:31:53.787993974+08:00","start_ts":449545478819807233,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545478819807233,"checkpoint_ts":449545478819807233,"checkpoint_time":"2024-05-05 11:31:48.828"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
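[editor note] Putting the sql_mode steps traced above together: the test switches both the upstream TiDB (port 4000) and the downstream TiDB (port 3306) to NO_BACKSLASH_ESCAPES, then creates a MySQL-sink changefeed that flushes one row per transaction. A compressed bash sketch of that setup; run_sql is the harness helper sourced from _utils, and the start-ts value is the one printed earlier in this log.
# Condensed view of the sql_mode test setup.
run_sql "set global sql_mode='NO_BACKSLASH_ESCAPES';" 127.0.0.1 4000    # upstream TiDB
run_sql "set global sql_mode='NO_BACKSLASH_ESCAPES';" 127.0.0.1 3306    # downstream TiDB
cdc cli changefeed create --changefeed-id=test-1 --start-ts=449545478819807233 \
    --sink-uri="mysql://root@127.0.0.1:3306/?max-txn-row=1"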
table test.binary_columns not exists for 1-th check, retry later
table test.finish not exists for 135-th check, retry later
+ set +x
+ run_sql 'use test; create table t1(id bigint primary key, a text, b text as ((regexp_replace(a, '\''^[1-9]\d{9,29}$'\'', '\''aaaaa'\''))), c text); insert into t1 (id, a, c) values(1,123456, '\''ab\\\\c'\''); insert into t1 (id, a, c) values(2,1234567890123, '\''ab\\c'\'');' 127.0.0.1 4000
+ '[' kafka == mysql ']'
+ stop_tidb_cluster
table test.binary_columns not exists for 2-th check, retry later
table test.finish not exists for 136-th check, retry later
table test.binary_columns not exists for 3-th check, retry later
table test.finish not exists for 137-th check, retry later
table test.finish not exists for 138-th check, retry later
table test.binary_columns not exists for 4-th check, retry later
table test.finish not exists for 139-th check, retry later
table test.binary_columns not exists for 5-th check, retry later
table test.binary_columns exists
check diff failed 1-th time, retry later
table test.finish not exists for 140-th check, retry later
table test.finish not exists for 141-th check, retry later
check diff successfully
table test.finish not exists for 142-th check, retry later
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/sql_mode
The 1 times to try to start tidb cluster...
table test.finish not exists for 143-th check, retry later
11:32AM INF > Run case=sql/debezium/nationalized_character_test.sql
table test.finish not exists for 144-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/sql_mode
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 145-th check, retry later
11:32AM INF > Run case=sql/debezium/numeric_column_test.sql
table test.finish not exists for 146-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 147-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 148-th check, retry later
11:32AM INF > Run case=sql/debezium/readbinlog_test.sql
11:32AM INF > Run case=sql/debezium/real_test.sql
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af8780008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29980, start at 2024-05-05 11:31:45.317843253 +0800 CST m=+5.092992443	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.323 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.310 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.310 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
tidb_disable_column_tracking_time	2024-05-05 03:31:46 UTC	Record the last time tidb_enable_column_tracking is set off
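The VARIABLE_NAME / VARIABLE_VALUE rows above are TiDB's bookkeeping table mysql.tidb, which the startup check dumps to confirm that bootstrap finished and a GC worker leader was elected. A minimal way to pull just those rows by hand might look like the following sketch (assuming the standard mysql client and the upstream port 4000 used in this test; this exact query is not part of the test scripts):

    # Hypothetical manual check that mirrors the dump above.
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "SELECT VARIABLE_NAME, VARIABLE_VALUE FROM mysql.tidb WHERE VARIABLE_NAME = 'bootstrapped' OR VARIABLE_NAME LIKE 'tikv_gc%'"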
ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'normal'@'%'
start tidb cluster failed
The 2 times to try to start tidb cluster...
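ERROR 1396 on CREATE USER usually means the 'normal'@'%' account survived the aborted first attempt, so the script falls back to retrying the whole cluster start. If the provisioning step only needs the account to exist, an idempotent variant would avoid the retry; a sketch only, since the real statement, password and grants live in the test scripts and are not shown in this log:

    # Sketch: tolerate an already-existing account instead of failing (MySQL 5.7+/TiDB syntax).
    mysql -h 127.0.0.1 -P 4000 -u root -e \
      "CREATE USER IF NOT EXISTS 'normal'@'%'; GRANT ALL PRIVILEGES ON *.* TO 'normal'@'%';"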
table test.binary_columns exists
check diff failed 1-th time, retry later
table test.finish not exists for 149-th check, retry later
check diff failed 2-th time, retry later
table test.finish not exists for 150-th check, retry later
check diff failed 3-th time, retry later
table test.finish not exists for 151-th check, retry later
check diff failed 4-th time, retry later
table test.finish not exists for 152-th check, retry later
check diff failed 5-th time, retry later
table test.finish not exists for 153-th check, retry later
check diff successfully
wait process cdc.test exit for 1-th time...
start tidb cluster in /tmp/tidb_cdc_test/sql_mode
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:32:33 CST 2024] <<<<<< run test case canal_json_adapter_compatibility success! >>>>>>
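The "wait process cdc.test exit" lines come from a helper that polls until no cdc.test process is left before the case is declared finished. A standalone equivalent of that loop, assuming a function name and retry budget that are not the framework's actual code:

    # Poll until a process with the given name is gone, up to ~60s.
    wait_process_exit() {
        local name="$1"
        for i in $(seq 1 60); do
            if ! pgrep -x "$name" >/dev/null 2>&1; then
                echo "process $name already exit"
                return 0
            fi
            echo "wait process $name exit for ${i}-th time..."
            sleep 1
        done
        echo "process $name still running after 60s" >&2
        return 1
    }

    wait_process_exit cdc.test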
table test.finish not exists for 154-th check, retry later
11:32AM INF > Run case=sql/debezium/regression_test.sql
table test.finish not exists for 155-th check, retry later
table test.finish not exists for 156-th check, retry later
table test.finish not exists for 157-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 158-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/canal_json_basic/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table test.finish not exists for 159-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af8780008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29980, start at 2024-05-05 11:31:45.317843253 +0800 CST m=+5.092992443	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.323 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.310 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.310 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
tidb_disable_column_tracking_time	2024-05-05 03:31:46 UTC	Record the last time tidb_enable_column_tracking is set off
ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'normal'@'%'
start tidb cluster failed
The 3 times to try to start tidb cluster...
table test.finish not exists for 160-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/canal_json_basic
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 161-th check, retry later
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   221  100   221    0     0   2613      0 --:--:-- --:--:-- --:--:--  2630
+ synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-05 11:32:36.129","puller_resolved_ts":"2024-05-05 11:32:28.129","last_synced_ts":"2024-05-05 11:30:19.679","now_ts":"2024-05-05 11:32:36.000","info":"Data syncing is finished"}'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:32:36.129","puller_resolved_ts":"2024-05-05' '11:32:28.129","last_synced_ts":"2024-05-05' '11:30:19.679","now_ts":"2024-05-05' '11:32:36.000","info":"Data' syncing is 'finished"}'
++ jq .synced
+ status=true
+ '[' true '!=' true ']'
+ kill_pd
++ ps aux
++ grep pd-server
++ grep /tmp/tidb_cdc_test/synced_status
+ info='jenkins    16831  7.1  0.0 13652244 143080 ?     Sl   11:29   0:11 pd-server --advertise-client-urls http://127.0.0.1:2379 --client-urls http://0.0.0.0:2379 --advertise-peer-urls http://127.0.0.1:2380 --peer-urls http://0.0.0.0:2380 --config /tmp/tidb_cdc_test/synced_status/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status/pd1.log --data-dir /tmp/tidb_cdc_test/synced_status/pd1 --name=pd1 --initial-cluster=pd1=http://127.0.0.1:2380
jenkins    16892  4.7  0.0 13660376 136096 ?     Sl   11:29   0:07 pd-server --advertise-client-urls http://127.0.0.1:2479 --client-urls http://0.0.0.0:2479 --advertise-peer-urls http://127.0.0.1:2480 --peer-urls http://0.0.0.0:2480 --config /tmp/tidb_cdc_test/synced_status/pd-config.toml --log-file /tmp/tidb_cdc_test/synced_status/down_pd.log --data-dir /tmp/tidb_cdc_test/synced_status/down_pd'
++ ps aux
++ grep pd-server
++ grep /tmp/tidb_cdc_test/synced_status
++ awk '{print $2}'
++ xargs kill -9
+ sleep 20
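The trace above is the synced_status case asserting that the changefeed's /synced endpoint reports synced=true, then killing both pd-server processes that belong to this test's workdir and waiting 20 seconds for the cluster to notice. Condensed, the same steps are roughly:

    # 1. Assert the changefeed reports synced=true (endpoint and changefeed id from the trace).
    synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
    status=$(echo "$synced_status" | jq -r .synced)
    [ "$status" = "true" ] || { echo "expected synced=true, got: $synced_status"; exit 1; }

    # 2. Kill only the pd-server processes started under this test's workdir,
    #    then give the cluster time to notice PD is gone.
    ps aux | grep pd-server | grep /tmp/tidb_cdc_test/synced_status |
        awk '{print $2}' | xargs kill -9
    sleep 20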
{"level":"warn","ts":1714879962.1514792,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0013e16c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879962.1515276,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879962.2201872,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0021ace00/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":1714879962.220249,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879963.128164,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0023c21c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879963.128216,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":"2024-05-05T11:32:47.009501+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:32:47.012916+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:32:47.065038+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 162-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 163-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
{"level":"warn","ts":"2024-05-05T11:32:53.011084+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:32:53.014187+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
{"level":"warn","ts":"2024-05-05T11:32:53.066053+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 164-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 165-th check, retry later
table test.finish not exists for 166-th check, retry later
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3f4a700008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:26129, start at 2024-05-05 11:32:56.099388374 +0800 CST m=+5.114225490	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:34:56.105 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:32:56.092 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:22:56.092 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3f4a700008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:26129, start at 2024-05-05 11:32:56.099388374 +0800 CST m=+5.114225490	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:34:56.105 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:32:56.092 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:22:56.092 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3f4b240014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:26205, start at 2024-05-05 11:32:56.184485812 +0800 CST m=+5.147158098	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:34:56.193 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:32:56.187 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:22:56.187 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/canal_json_basic/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/canal_json_basic/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/canal_json_basic/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/canal_json_basic/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/canal_json_basic/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
start tidb cluster in /tmp/tidb_cdc_test/sql_mode
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 167-th check, retry later

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:01 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:02 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:32:59.012321+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:32:59.015564+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:32:59.067687+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
[Sun May  5 11:32:59 CST 2024] <<<<<< START cdc server in canal_json_basic case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_basic.2758927591.out server --log-file /tmp/tidb_cdc_test/canal_json_basic/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/canal_json_basic/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 168-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:33:02 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ee1cb0ee-2988-41c6-b088-ae6f38855d3c
	{"id":"ee1cb0ee-2988-41c6-b088-ae6f38855d3c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879979}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfaac0c5
	ee1cb0ee-2988-41c6-b088-ae6f38855d3c

/tidb/cdc/default/default/upstream/7365353377945592476
	{"id":7365353377945592476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ee1cb0ee-2988-41c6-b088-ae6f38855d3c
	{"id":"ee1cb0ee-2988-41c6-b088-ae6f38855d3c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879979}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfaac0c5
	ee1cb0ee-2988-41c6-b088-ae6f38855d3c

/tidb/cdc/default/default/upstream/7365353377945592476
	{"id":7365353377945592476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/ee1cb0ee-2988-41c6-b088-ae6f38855d3c
	{"id":"ee1cb0ee-2988-41c6-b088-ae6f38855d3c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879979}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfaac0c5
	ee1cb0ee-2988-41c6-b088-ae6f38855d3c

/tidb/cdc/default/default/upstream/7365353377945592476
	{"id":7365353377945592476,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
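The curl exchange above is the usual wait-for-cdc-server loop: poll /debug/info with basic auth up to 50 times, treat output containing "etcd info" as ready, and treat "failed to get info:" as a hard failure. Reduced to its core (URL and credentials are the ones printed in the trace):

    # Wait until the cdc server's /debug/info endpoint reports etcd metadata.
    for i in $(seq 0 50); do
        res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info \
                  --user ticdc:ticdc_secret 2>/dev/null)
        if echo "$res" | grep -q 'failed to get info:'; then
            echo "cdc server returned an error" >&2; exit 1
        fi
        if echo "$res" | grep -q 'etcd info'; then
            break   # server is up and registered in etcd
        fi
        [ "$i" -eq 50 ] && { echo "cdc server not ready" >&2; exit 1; }
        sleep 3
    done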
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_basic.cli.27646.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-canal-json-basic?protocol=canal-json&enable-tidb-extension=true'
Create changefeed successfully!
ID: 91ab47a5-6115-4cc9-adb8-569ca9a707b1
Info: {"upstream_id":7365353377945592476,"namespace":"default","id":"91ab47a5-6115-4cc9-adb8-569ca9a707b1","sink_uri":"kafka://127.0.0.1:9092/ticdc-canal-json-basic?protocol=canal-json\u0026enable-tidb-extension=true","create_time":"2024-05-05T11:33:02.87555472+08:00","start_ts":449545498195656707,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545498195656707,"checkpoint_ts":449545498195656707,"checkpoint_time":"2024-05-05 11:33:02.741"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table test.finish not exists for 169-th check, retry later
+ set +x
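The changefeed created above targets the local Kafka broker with the canal-json protocol and TiDB extensions enabled. Stripped of the coverage-instrumented test binary, the command is essentially the following sketch (sink URI copied from the trace; the default server address 127.0.0.1:8300 is an assumption):

    # Create the canal-json changefeed against the cdc server started earlier.
    cdc cli changefeed create --server=http://127.0.0.1:8300 \
        --sink-uri="kafka://127.0.0.1:9092/ticdc-canal-json-basic?protocol=canal-json&enable-tidb-extension=true"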

  0     0    0     0    0     0      0      0 --:--:--  0:00:03 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:04 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:05 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:06 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:07 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:08 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:05.012899+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:05.016263+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:05.068861+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 170-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   723  100   723    0     0   8736      0 --:--:-- --:--:-- --:--:--  8817
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:30:45.017","puller_resolved_ts":"2024-05-05 11:30:45.017","last_synced_ts":"2024-05-05 11:30:38.417","now_ts":"2024-05-05 11:32:55.000","info":"Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' \u003e '\''Resolved-Ts'\'' \u003e '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait"}'
++ jq .synced
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:45.017","puller_resolved_ts":"2024-05-05' '11:30:45.017","last_synced_ts":"2024-05-05' '11:30:38.417","now_ts":"2024-05-05' '11:32:55.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:30:45.017","puller_resolved_ts":"2024-05-05' '11:30:45.017","last_synced_ts":"2024-05-05' '11:30:38.417","now_ts":"2024-05-05' '11:32:55.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq -r .info
+ info='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ target_message='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ '[' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' '!=' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' ']'
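Here the same /synced endpoint is queried again after PD has been killed: the case now expects synced=false and checks that the info field carries the "Please check whether PD is online ..." guidance verbatim. The assertion reduces to this sketch:

    # After PD is down, synced must be false and info must match the expected hint.
    synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
    [ "$(echo "$synced_status" | jq -r .synced)" = "false" ] || exit 1
    info=$(echo "$synced_status" | jq -r .info)
    # target_message is the full guidance string shown in the trace above (elided here).
    target_message='Please check whether PD is online and TiKV Regions are all available. ...'
    [ "$info" = "$target_message" ] || { echo "unexpected info: $info"; exit 1; }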
+ cleanup_process cdc.test
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
wait process cdc.test exit for 3-th time...
cdc.test: no process found
wait process cdc.test exit for 4-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ run_case_with_unavailable_tidb conf/changefeed-redo.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
+ mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
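The shell-init / chdir getcwd errors above most likely appear because the previous case's working directory was removed while the shell was still sitting in it; they are harmless here, but easy to avoid by moving to a stable directory before recreating the workdir. A sketch, using the workdir from the trace:

    # Leave the doomed directory first so later getcwd() calls succeed.
    cd /tmp
    rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
    mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
    cd /tmp/tidb_cdc_test/synced_status_with_redo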

  0     0    0     0    0     0      0      0 --:--:--  0:00:09 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:07.001036+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:33:07.001076+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}

  0     0    0     0    0     0      0      0 --:--:--  0:00:10 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:07.004301+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:33:07.004334+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":"2024-05-05T11:33:07.056058+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":"2024-05-05T11:33:07.056095+0800","logger":"etcd-client","caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
table test.finish not exists for 171-th check, retry later
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 172-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af7f80003	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29888, start at 2024-05-05 11:31:45.282618248 +0800 CST m=+5.112570130	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.289 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.278 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.278 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b3af8780008	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-34lmg-zbbd0, pid:29980, start at 2024-05-05 11:31:45.317843253 +0800 CST m=+5.092992443	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:33:45.323 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	60m	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:31:45.310 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:21:45.310 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
tidb_disable_column_tracking_time	2024-05-05 03:31:46 UTC	Record the last time tidb_enable_column_tracking is set off
ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'normal'@'%'
start tidb cluster failed
+ run_sql 'set global sql_mode='\''ANSI_QUOTES'\'';' 127.0.0.1 4000
+ run_sql 'set global sql_mode='\''ANSI_QUOTES'\'';' 127.0.0.1 3306
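The sql_mode case begins by forcing ANSI_QUOTES globally on both the upstream cluster (port 4000) and the downstream one (port 3306). run_sql is a test helper; the equivalent direct client calls would be roughly the following (mysql invocation and credentials are assumptions):

    # Upstream TiDB and the downstream server both switch to ANSI_QUOTES.
    mysql -h 127.0.0.1 -P 4000 -u root -e "set global sql_mode='ANSI_QUOTES';"
    mysql -h 127.0.0.1 -P 3306 -u root -e "set global sql_mode='ANSI_QUOTES';"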
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.35657.out cli tso query --pd=http://127.0.0.1:2379
[Sun May  5 11:33:09 CST 2024] <<<<<< START kafka consumer in canal_json_basic case >>>>>>
Verifying downstream PD is started...
+ set +x
+ tso='449545499928952833
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545499928952833 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545499928952833
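The start-ts for the new changefeed is obtained by asking PD for a current TSO through the cdc CLI and keeping only the first field; the PASS / coverage lines are noise from the instrumented test binary. Without the coverage wrapper this is roughly:

    # Query a TSO from PD and keep just the timestamp value.
    start_ts=$(cdc cli tso query --pd=http://127.0.0.1:2379 | awk 'NR==1 {print $1}')
    echo "start_ts=$start_ts"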
+ run_cdc_server --workdir /tmp/tidb_cdc_test/sql_mode --binary cdc.test
[Sun May  5 11:33:10 CST 2024] <<<<<< START cdc server in sql_mode case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.3569435696.out server --log-file /tmp/tidb_cdc_test/sql_mode/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/sql_mode/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3

  0     0    0     0    0     0      0      0 --:--:--  0:00:11 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:12 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:13 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:14 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:11.014478+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:11.017684+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:11.069749+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 173-th check, retry later
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 174-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:33:13 GMT
< Content-Type: text/plain; charset=utf-8
< Transfer-Encoding: chunked
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:

changefeedID: default/test-1
{UpstreamID:7365353076553067605 Namespace:default ID:test-1 SinkURI:mysql://root@127.0.0.1:3306/?max-txn-row=1 CreateTime:2024-05-05 11:31:53.787993974 +0800 CST StartTs:449545478819807233 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012359e0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545480117420034}
{CheckpointTs:449545480497528837 MinTableBarrierTs:449545480497528837 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/capture/fd36eee7-f50c-472e-9e12-809928468494
	{"id":"fd36eee7-f50c-472e-9e12-809928468494","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879991}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfd974cf
	fd36eee7-f50c-472e-9e12-809928468494

/tidb/cdc/default/default/changefeed/info/test-1
	{"upstream-id":7365353076553067605,"namespace":"default","changefeed-id":"test-1","sink-uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create-time":"2024-05-05T11:31:53.787993974+08:00","start-ts":449545478819807233,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545480117420034}

/tidb/cdc/default/default/changefeed/status/test-1
	{"checkpoint-ts":449545480497528837,"min-table-barrier-ts":449545480497528837,"admin-job-type":0}

/tidb/cdc/default/default/task/position/84625281-dc0f-4c16-836a-41bb68afb573/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/fd36eee7-f50c-472e-9e12-809928468494/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:

changefeedID: default/test-1
{UpstreamID:7365353076553067605 Namespace:default ID:test-1 SinkURI:mysql://root@127.0.0.1:3306/?max-txn-row=1 CreateTime:2024-05-05 11:31:53.787993974 +0800 CST StartTs:449545478819807233 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012359e0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545480117420034}
{CheckpointTs:449545480497528837 MinTableBarrierTs:449545480497528837 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/capture/fd36eee7-f50c-472e-9e12-809928468494
	{"id":"fd36eee7-f50c-472e-9e12-809928468494","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879991}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfd974cf
	fd36eee7-f50c-472e-9e12-809928468494

/tidb/cdc/default/default/changefeed/info/test-1
	{"upstream-id":7365353076553067605,"namespace":"default","changefeed-id":"test-1","sink-uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create-time":"2024-05-05T11:31:53.787993974+08:00","start-ts":449545478819807233,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545480117420034}

/tidb/cdc/default/default/changefeed/status/test-1
	{"checkpoint-ts":449545480497528837,"min-table-barrier-ts":449545480497528837,"admin-job-type":0}

/tidb/cdc/default/default/task/position/84625281-dc0f-4c16-836a-41bb68afb573/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/fd36eee7-f50c-472e-9e12-809928468494/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null+ grep -q 'failed to get info:'
}

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:

changefeedID: default/test-1
{UpstreamID:7365353076553067605 Namespace:default ID:test-1 SinkURI:mysql://root@127.0.0.1:3306/?max-txn-row=1 CreateTime:2024-05-05 11:31:53.787993974 +0800 CST StartTs:449545478819807233 TargetTs:0 AdminJobType:noop Engine:unified SortDir: Config:0xc0012359e0 State:normal Error:<nil> Warning:<nil> CreatorVersion:v8.2.0-alpha-53-g0de8dc3e4 Epoch:449545480117420034}
{CheckpointTs:449545480497528837 MinTableBarrierTs:449545480497528837 AdminJobType:noop}



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/84625281-dc0f-4c16-836a-41bb68afb573
	{"id":"84625281-dc0f-4c16-836a-41bb68afb573","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879910}

/tidb/cdc/default/__cdc_meta__/capture/fd36eee7-f50c-472e-9e12-809928468494
	{"id":"fd36eee7-f50c-472e-9e12-809928468494","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714879991}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46ce942dcf
	84625281-dc0f-4c16-836a-41bb68afb573

/tidb/cdc/default/__cdc_meta__/owner/22318f46cfd974cf
	fd36eee7-f50c-472e-9e12-809928468494

/tidb/cdc/default/default/changefeed/info/test-1
	{"upstream-id":7365353076553067605,"namespace":"default","changefeed-id":"test-1","sink-uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create-time":"2024-05-05T11:31:53.787993974+08:00","start-ts":449545478819807233,"target-ts":0,"admin-job-type":0,"sort-engine":"","sort-dir":"","config":{"memory-quota":1073741824,"case-sensitive":false,"force-replicate":false,"check-gc-safe-point":true,"enable-sync-point":false,"enable-table-monitor":false,"ignore-ineligible-table":false,"bdr-mode":false,"sync-point-interval":600000000000,"sync-point-retention":86400000000000,"filter":{"rules":["*.*"],"ignore-txn-start-ts":null,"event-filters":null},"mounter":{"worker-num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include-commit-ts":false,"binary-encoding-method":"base64","output-old-value":false,"output-handle-key":false},"encoder-concurrency":32,"terminator":"\r\n","date-separator":"day","enable-partition-separator":true,"enable-kafka-sink-v2":false,"only-output-updated-columns":false,"delete-only-output-handle-key-columns":false,"content-compatible":false,"advance-timeout-in-sec":150,"send-bootstrap-interval-in-sec":120,"send-bootstrap-in-msg-count":10000,"send-bootstrap-to-all-partition":true,"debezium-disable-schema":false,"open":{"output-old-value":true},"debezium":{"output-old-value":true}},"consistent":{"level":"none","max-log-size":64,"flush-interval":2000,"meta-flush-interval":200,"encoding-worker-num":16,"flush-worker-num":8,"storage":"","use-file-backend":false,"compression":"","memory-usage":{"memory-quota-percentage":50}},"scheduler":{"enable-table-across-nodes":false,"region-threshold":100000,"write-key-threshold":0,"region-per-span":0},"integrity":{"integrity-check-level":"none","corruption-handle-level":"warn"},"changefeed-error-stuck-duration":1800000000000,"synced-status":{"synced-check-interval":300,"checkpoint-interval":15},"sql-mode":""},"state":"normal","error":null,"warning":null,"creator-version":"v8.2.0-alpha-53-g0de8dc3e4","epoch":449545480117420034}

/tidb/cdc/default/default/changefeed/status/test-1
	{"checkpoint-ts":449545480497528837,"min-table-barrier-ts":449545480497528837,"admin-job-type":0}

/tidb/cdc/default/default/task/position/84625281-dc0f-4c16-836a-41bb68afb573/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/task/position/fd36eee7-f50c-472e-9e12-809928468494/test-1
	{"checkpoint-ts":0,"resolved-ts":0,"count":0,"error":null,"warning":null}

/tidb/cdc/default/default/upstream/7365353076553067605
	{"id":7365353076553067605,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545499928952833 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-2
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.sql_mode.cli.35755.out cli changefeed create --start-ts=449545499928952833 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-2
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 175-th check, retry later
Create changefeed successfully!
ID: test-2
Info: {"upstream_id":7365353076553067605,"namespace":"default","id":"test-2","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:33:16.456480089+08:00","start_ts":449545499928952833,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545499928952833,"checkpoint_ts":449545499928952833,"checkpoint_time":"2024-05-05 11:33:09.353"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
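The cli call above registers changefeed test-2 with a MySQL sink and the start-ts obtained earlier. A hedged sketch of the same step using the plain cdc binary instead of the coverage-instrumented cdc.test wrapper; the --server flag is an assumption here (the trace relies on the default 127.0.0.1:8300):

# Create the changefeed, then read it back to confirm its state (sketch).
cdc cli changefeed create \
    --server=http://127.0.0.1:8300 \
    --start-ts=449545499928952833 \
    --sink-uri='mysql://root@127.0.0.1:3306/?max-txn-row=1' \
    --changefeed-id=test-2

cdc cli changefeed query --server=http://127.0.0.1:8300 --changefeed-id=test-2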

  0     0    0     0    0     0      0      0 --:--:--  0:00:15 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:16 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:17 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:18 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:19 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:20 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:17.014956+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:17.018877+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:17.071157+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"warn","ts":1714879997.1527715,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0013e16c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879997.1528132,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
{"level":"warn","ts":1714879997.2216308,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0021ace00/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
{"level":"info","ts":1714879997.2216792,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
table test.finish not exists for 176-th check, retry later
+ set +x
+ run_sql 'use test; create table t2(id bigint primary key, a date); insert into t2 values(1, '\''2023-02-08'\'');' 127.0.0.1 4000

  0     0    0     0    0     0      0      0 --:--:--  0:00:21 --:--:--     0
{"level":"warn","ts":1714879998.1296272,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0023c21c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"info","ts":1714879998.129676,"caller":"v3@v3.5.12/client.go:210","msg":"Auto sync endpoints failed.","error":"context deadline exceeded"}
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 177-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b40ac980010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:16481, start at 2024-05-05 11:33:18.775901904 +0800 CST m=+5.042942090	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:18.781 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:18.758 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:18.758 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b40ac980010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:16481, start at 2024-05-05 11:33:18.775901904 +0800 CST m=+5.042942090	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:18.781 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:18.758 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:18.758 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b40aedc0007	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:16555, start at 2024-05-05 11:33:18.912093662 +0800 CST m=+5.127626391	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:18.918 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:18.903 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:18.903 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
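The VARIABLE_NAME / VARIABLE_VALUE listings above are printed while verifying that TiDB has bootstrapped and that the GC worker is running; they match the contents of the mysql.tidb table. A query along these lines would reproduce them (a sketch; host and port are taken from the upstream TiDB in this case):

# Inspect bootstrap and GC metadata directly (sketch).
mysql -h 127.0.0.1 -P 4000 -u root -e 'SELECT * FROM mysql.tidb'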
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 178-th check, retry later
+ cd /tmp/tidb_cdc_test/synced_status_with_redo
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.17879.out cli tso query --pd=http://127.0.0.1:2379

  0     0    0     0    0     0      0      0 --:--:--  0:00:22 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:23 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:24 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:25 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:23.016742+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00126a000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}

  0     0    0     0    0     0      0      0 --:--:--  0:00:26 --:--:--     0
{"level":"warn","ts":"2024-05-05T11:33:23.020565+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0011f2000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
{"level":"warn","ts":"2024-05-05T11:33:23.072338+0800","logger":"etcd-client","caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00131a000/127.0.0.1:2479","attempt":0,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2479: connect: connection refused\""}
table test.finish not exists for 179-th check, retry later
+ set +x
+ tso='449545503338135553
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545503338135553 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545503338135553
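The block above shows how the harness turns the cli tso query output into a usable start-ts: the first whitespace-separated field is the TSO, and the trailing PASS/coverage lines are discarded by awk. A minimal sketch of that extraction (the unquoted echo deliberately mirrors the trace, collapsing the multi-line output onto one line):

# Derive start_ts from `cdc cli tso query` output (sketch).
tso_output=$(cdc cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')
echo "start_ts=$start_ts"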
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test
[Sun May  5 11:33:23 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.1792117923.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 180-th check, retry later

  0     0    0     0    0     0      0      0 --:--:--  0:00:27 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:28 --:--:--     0
  0     0    0     0    0     0      0      0 --:--:--  0:00:29 --:--:--     0
100   135  100   135    0     0      4      0  0:00:33  0:00:30  0:00:03    27
100   135  100   135    0     0      4      0  0:00:33  0:00:30  0:00:03    33
+ synced_status='{
    "error_msg": "[CDC:ErrPDEtcdAPIError]etcd api call error: context deadline exceeded",
    "error_code": "CDC:ErrPDEtcdAPIError"
}'
++ echo '{' '"error_msg":' '"[CDC:ErrPDEtcdAPIError]etcd' api call error: context deadline 'exceeded",' '"error_code":' '"CDC:ErrPDEtcdAPIError"' '}'
++ jq -r .error_code
+ error_code=CDC:ErrPDEtcdAPIError
+ cleanup_process cdc.test
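With PD apparently unreachable (note the connection-refused warnings above), the synced endpoint responded with an error body instead of a status, and the trace pulls error_code out with jq before cleaning up. A sketch of that error-path check, with the endpoint path and expected code taken from the values seen in the trace:

# Expect CDC:ErrPDEtcdAPIError from the synced endpoint when PD is down (sketch).
synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
error_code=$(echo "$synced_status" | jq -r .error_code)
if [ "$error_code" != "CDC:ErrPDEtcdAPIError" ]; then
    echo "unexpected error code: $error_code" && exit 1
fi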
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:33:26 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0d497f8a-347e-44de-89c6-693bf84ce582
	{"id":"0d497f8a-347e-44de-89c6-693bf84ce582","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880004}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d0037dd1
	0d497f8a-347e-44de-89c6-693bf84ce582

/tidb/cdc/default/default/upstream/7365353472512715421
	{"id":7365353472512715421,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0d497f8a-347e-44de-89c6-693bf84ce582
	{"id":"0d497f8a-347e-44de-89c6-693bf84ce582","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880004}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d0037dd1
	0d497f8a-347e-44de-89c6-693bf84ce582

/tidb/cdc/default/default/upstream/7365353472512715421
	{"id":7365353472512715421,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/0d497f8a-347e-44de-89c6-693bf84ce582
	{"id":"0d497f8a-347e-44de-89c6-693bf84ce582","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880004}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d0037dd1
	0d497f8a-347e-44de-89c6-693bf84ce582

/tidb/cdc/default/default/upstream/7365353472512715421
	{"id":7365353472512715421,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed-redo.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545503338135553 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.17979.out cli changefeed create --start-ts=449545503338135553 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365353472512715421,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:33:27.314355362+08:00","start_ts":449545503338135553,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545503338135553,"checkpoint_ts":449545503338135553,"checkpoint_time":"2024-05-05 11:33:22.358"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
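The changefeed above is created from conf/changefeed-redo.toml, and the returned Info shows eventual-consistency redo enabled (consistent.level = "eventual" with storage under /tmp/tidb_cdc_test/synced_status/redo) plus custom synced-status intervals. A rough reconstruction of what such a config could contain, written as a heredoc; the field names are inferred from the JSON above and the actual file in the repository may differ:

# Sketch of a changefeed config enabling redo plus synced-status tuning.
cat > changefeed-redo.toml <<'EOF'
[synced-status]
synced-check-interval = 120
checkpoint-interval = 20

[consistent]
level = "eventual"
storage = "file:///tmp/tidb_cdc_test/synced_status/redo"
EOF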
table test.finish_mark not exists for 1-th check, retry later
wait process cdc.test exit for 1-th time...
table test.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 181-th check, retry later
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ set +x
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
table test.finish_mark not exists for 3-th check, retry later
table test.finish not exists for 182-th check, retry later
table test.t1 exists
+ sleep 5
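The t1 existence check above is the usual poll-until-visible step: the upstream insert only counts as replicated once the table can be described on the downstream at 127.0.0.1:3306. A minimal sketch of such a poll (not the actual check_table_exists helper):

# Poll the downstream until a table becomes visible (sketch).
check_table_exists() {
    local table=$1 host=$2 port=$3
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -e "DESC $table" >/dev/null 2>&1; then
            echo "table $table exists"
            return 0
        fi
        echo "table $table not exists for $i-th check, retry later"
        sleep 2
    done
    return 1
}

check_table_exists test.t1 127.0.0.1 3306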
table test.finish not exists for 183-th check, retry later
table test.finish_mark not exists for 4-th check, retry later
table test.finish not exists for 184-th check, retry later
table test.finish_mark not exists for 5-th check, retry later
table test.finish_mark not exists for 6-th check, retry later
table test.finish not exists for 185-th check, retry later
+ kill_tidb
++ ps aux
++ grep tidb-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
+ info='jenkins    16473  3.9  0.0 2506872 195628 ?      Sl   11:33   0:00 tidb-server -P 4000 -config /tmp/tidb_cdc_test/synced_status_with_redo/tidb-config-1714879993723695789.toml --store tikv --path 127.0.0.1:2379 --status=10080 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tidb.log
jenkins    16481 12.4  0.0 2845120 255684 ?      Sl   11:33   0:02 tidb-server -P 4001 -config /tmp/tidb_cdc_test/synced_status_with_redo/tidb-config-1714879993726923333.toml --store tikv --path 127.0.0.1:2379 --status=10081 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tidb_other.log
jenkins    16555 14.2  0.0 2665480 257492 ?      Sl   11:33   0:03 tidb-server -P 3306 -config /tmp/tidb_cdc_test/synced_status_with_redo/tidb-config-1714879993777083056.toml --store tikv --path 127.0.0.1:2479 --status=20080 --log-file /tmp/tidb_cdc_test/synced_status_with_redo/tidb_down.log'
++ ps aux
++ grep tidb-server
++ grep /tmp/tidb_cdc_test/synced_status_with_redo
++ awk '{print $2}'
++ xargs kill -9
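The kill_tidb step above is traced as separate ++ commands; assembled into one pipeline it simply finds the tidb-server processes belonging to this case's workdir and kills them hard, so the test can observe how the changefeed reports sync status without an upstream TiDB:

# What the traced kill_tidb commands amount to (same pipeline, one stage per line).
ps aux \
    | grep tidb-server \
    | grep /tmp/tidb_cdc_test/synced_status_with_redo \
    | awk '{print $2}' \
    | xargs kill -9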
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   3916      0 --:--:-- --:--:-- --:--:--  3983
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:33:34.759","puller_resolved_ts":"2024-05-05 11:33:28.758","last_synced_ts":"2024-05-05 11:33:29.258","now_ts":"2024-05-05 11:33:36.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:33:34.759","puller_resolved_ts":"2024-05-05' '11:33:28.758","last_synced_ts":"2024-05-05' '11:33:29.258","now_ts":"2024-05-05' '11:33:36.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:33:34.759","puller_resolved_ts":"2024-05-05' '11:33:28.758","last_synced_ts":"2024-05-05' '11:33:29.258","now_ts":"2024-05-05' '11:33:36.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ target_message='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
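Immediately after killing TiDB, the synced endpoint is expected to report synced=false with the "not finished" message, which is exactly what the jq checks above assert before the long wait. A sketch of that assertion, with the endpoint, fields, and message taken from the trace:

# Assert the changefeed is not yet synced right after the upstream is killed (sketch).
synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced_status" | jq .synced)
info=$(echo "$synced_status" | jq -r .info)
if [ "$status" != false ]; then
    echo "unexpected synced status: $status" && exit 1
fi
if [ "$info" != "The data syncing is not finished, please wait" ]; then
    echo "unexpected info message: $info" && exit 1
fi
sleep 130   # wait out the 120s synced-check-interval before re-checking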
table test.finish_mark not exists for 7-th check, retry later
table test.finish not exists for 186-th check, retry later
+ run_case_with_unavailable_tikv conf/changefeed.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status
+ mkdir -p /tmp/tidb_cdc_test/synced_status
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
table test.finish_mark not exists for 8-th check, retry later
table test.finish not exists for 187-th check, retry later
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
start tidb cluster in /tmp/tidb_cdc_test/synced_status
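The shell-init / getcwd errors above are what bash prints when its current working directory has already been deleted, which happens here because the previous case's workdir was removed while a subshell was still sitting in it. A sketch of how a script avoids that noise before recreating the workdir (paths taken from the trace):

# Leave the doomed directory before removing and recreating it (sketch).
cd /tmp || exit 1
rm -rf /tmp/tidb_cdc_test/synced_status
mkdir -p /tmp/tidb_cdc_test/synced_status
cd /tmp/tidb_cdc_test/synced_status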
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
11:33AM INF > Run case=sql/debezium/skip_messages_test.sql
11:33AM INF > Run case=sql/debezium/strategy_test.sql
table test.finish not exists for 188-th check, retry later
table test.finish_mark not exists for 9-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 189-th check, retry later
table test.finish_mark not exists for 10-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark not exists for 11-th check, retry later
table test.finish not exists for 190-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish_mark exists
check diff successfully
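The "check diff successfully" line is the harness confirming that upstream and downstream data match; in these integration tests that comparison is normally driven by sync_diff_inspector with a per-case TOML config. A heavily hedged sketch of such an invocation (the --config flag is real, but the config path here is purely illustrative):

# Compare upstream and downstream data for the case (sketch; path is hypothetical).
sync_diff_inspector --config=/tmp/tidb_cdc_test/sql_mode/diff_config.toml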
table test.finish not exists for 191-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 192-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4294740017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:20455, start at 2024-05-05 11:33:50.004969113 +0800 CST m=+5.134001363	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:50.011 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:49.981 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:49.981 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4294740017	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:20455, start at 2024-05-05 11:33:50.004969113 +0800 CST m=+5.134001363	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:50.011 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:49.981 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:49.981 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b42945c0005	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:20545, start at 2024-05-05 11:33:49.982418339 +0800 CST m=+5.047503172	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:35:49.988 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:33:49.975 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:23:49.975 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash-proxy.toml"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/log/proxy.log"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 193-th check, retry later
+ cd /tmp/tidb_cdc_test/synced_status
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.21999.out cli tso query --pd=http://127.0.0.1:2379
11:33AM INF > Run case=sql/debezium/table_column_comment_test.sql
table test.finish not exists for 194-th check, retry later
+ set +x
+ tso='449545511496843266
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545511496843266 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545511496843266
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status --binary cdc.test
[Sun May  5 11:33:54 CST 2024] <<<<<< START cdc server in synced_status case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.2203222034.out server --log-file /tmp/tidb_cdc_test/synced_status/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 195-th check, retry later
table test.finish not exists for 196-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:33:58 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/17d0762d-f0cf-4a99-9e9f-9081f051ac89
	{"id":"17d0762d-f0cf-4a99-9e9f-9081f051ac89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880035}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d07f35d0
	17d0762d-f0cf-4a99-9e9f-9081f051ac89

/tidb/cdc/default/default/upstream/7365353608666742130
	{"id":7365353608666742130,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/17d0762d-f0cf-4a99-9e9f-9081f051ac89
	{"id":"17d0762d-f0cf-4a99-9e9f-9081f051ac89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880035}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d07f35d0
	17d0762d-f0cf-4a99-9e9f-9081f051ac89

/tidb/cdc/default/default/upstream/7365353608666742130
	{"id":7365353608666742130,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/17d0762d-f0cf-4a99-9e9f-9081f051ac89
	{"id":"17d0762d-f0cf-4a99-9e9f-9081f051ac89","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880035}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d07f35d0
	17d0762d-f0cf-4a99-9e9f-9081f051ac89

/tidb/cdc/default/default/upstream/7365353608666742130
	{"id":7365353608666742130,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545511496843266 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.22089.out cli changefeed create --start-ts=449545511496843266 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365353608666742130,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:33:58.635427587+08:00","start_ts":449545511496843266,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545511496843266,"checkpoint_ts":449545511496843266,"checkpoint_time":"2024-05-05 11:33:53.481"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table test.finish not exists for 197-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
+ set +x
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
11:33AM INF > Run case=sql/debezium/timestamp_column_test.sql
table test.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 198-th check, retry later
table test.t1 exists
+ sleep 5
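The `check_table_exists test.t1 127.0.0.1 3306` call above, and the recurring "table ... not exists for N-th check, retry later" lines throughout this log, come from a simple polling helper in the test harness. A minimal sketch of such a check follows; the retry count, sleep interval, and mysql client flags are assumptions for illustration, not the exact helper from tests/integration_tests.

# Hypothetical sketch of a table-existence poll (not the exact helper used by the suite).
check_table_exists() {
    local table=$1 host=$2 port=$3 max_retry=${4:-60}
    for ((i = 1; i <= max_retry; i++)); do
        # Query the downstream; success means the table has been replicated.
        if mysql -h"$host" -P"$port" -uroot -e "SELECT 1 FROM $table LIMIT 1;" >/dev/null 2>&1; then
            echo "table $table exists"
            return 0
        fi
        echo "table $table not exists for $i-th check, retry later"
        sleep 2
    done
    echo "table $table not exists after $max_retry checks" >&2
    return 1
}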
table test.finish_mark not exists for 3-th check, retry later
table test.finish not exists for 199-th check, retry later
table test.finish_mark not exists for 4-th check, retry later
table test.finish not exists for 200-th check, retry later
table test.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
+ kill_tikv
table test.finish not exists for 201-th check, retry later
++ ps aux
++ grep tikv-server
++ grep /tmp/tidb_cdc_test/synced_status
+ info='jenkins    19815 30.7  0.5 4728872 2274184 ?     Sl   11:33   0:07 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20160 --status-addr 127.0.0.1:20181 --log-file /tmp/tidb_cdc_test/synced_status/tikv1.log --log-level debug -C /tmp/tidb_cdc_test/synced_status/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status/tikv1
jenkins    19816 23.7  0.5 4690984 2202908 ?     Sl   11:33   0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20161 --status-addr 127.0.0.1:20182 --log-file /tmp/tidb_cdc_test/synced_status/tikv2.log --log-level debug -C /tmp/tidb_cdc_test/synced_status/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status/tikv2
jenkins    19817 24.0  0.5 4691496 2223720 ?     Sl   11:33   0:05 tikv-server --pd 127.0.0.1:2379 -A 127.0.0.1:20162 --status-addr 127.0.0.1:20183 --log-file /tmp/tidb_cdc_test/synced_status/tikv3.log --log-level debug -C /tmp/tidb_cdc_test/synced_status/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status/tikv3
jenkins    19819 29.8  0.5 4723236 2263512 ?     Sl   11:33   0:07 tikv-server --pd 127.0.0.1:2479 -A 127.0.0.1:21160 --status-addr 127.0.0.1:21180 --log-file /tmp/tidb_cdc_test/synced_status/tikv_down.log --log-level debug -C /tmp/tidb_cdc_test/synced_status/tikv-config.toml -s /tmp/tidb_cdc_test/synced_status/tikv_down'
++ ps aux
++ grep tikv-server
++ grep /tmp/tidb_cdc_test/synced_status
++ awk '{print $2}'
++ xargs kill -9
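The pipeline above locates the tikv-server processes that belong to this test's workdir and kills them, which is what later drives the changefeed into the "not synced" state checked below. A condensed form of the same pattern, assuming the workdir shown in the trace (the extra `grep -v grep` is an addition so the pipeline does not match itself):

# Kill every tikv-server started under this test's workdir (pattern taken from the trace above).
ps aux | grep tikv-server | grep -v grep | grep /tmp/tidb_cdc_test/synced_status \
    | awk '{print $2}' | xargs kill -9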
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   2542      0 --:--:-- --:--:-- --:--:--  2557
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:34:06.131","puller_resolved_ts":"2024-05-05 11:34:00.081","last_synced_ts":"2024-05-05 11:34:00.131","now_ts":"2024-05-05 11:34:07.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:34:06.131","puller_resolved_ts":"2024-05-05' '11:34:00.081","last_synced_ts":"2024-05-05' '11:34:00.131","now_ts":"2024-05-05' '11:34:07.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:34:06.131","puller_resolved_ts":"2024-05-05' '11:34:00.081","last_synced_ts":"2024-05-05' '11:34:00.131","now_ts":"2024-05-05' '11:34:07.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ target_message='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
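The trace above (kill TiKV, then GET /api/v2/changefeeds/test-1/synced and inspect `.synced` and `.info` with jq) is the core assertion of the synced_status case. A condensed sketch of that check, reusing the endpoint and expected message exactly as they appear in the log:

# Sketch of the synced-status assertion seen in the trace above (paths and messages taken from the log).
synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced_status" | jq -r .synced)
info=$(echo "$synced_status" | jq -r .info)
target_message="The data syncing is not finished, please wait"
if [ "$status" != "false" ] || [ "$info" != "$target_message" ]; then
    echo "unexpected synced status: $synced_status" >&2
    exit 1
fi
sleep 130   # wait longer than synced_check_interval (120s in this changefeed's config) before re-checking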
11:34AM INF > Run case=sql/debezium/tinyint_test.sql
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:34:08 CST 2024] <<<<<< run test case canal_json_basic success! >>>>>>
table test.finish not exists for 202-th check, retry later
table test.finish not exists for 203-th check, retry later
11:34AM INF > Run case=sql/debezium/topic_name_sanitization_test.sql
+ '[' kafka == mysql ']'
+ stop_tidb_cluster
table test.finish not exists for 204-th check, retry later
table test.finish not exists for 205-th check, retry later
table test.finish not exists for 206-th check, retry later
table test.finish not exists for 207-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/canal_json_content_compatible/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
+ stop_tidb_cluster
table test.finish not exists for 208-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/canal_json_content_compatible
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
table test.finish not exists for 209-th check, retry later
[Pipeline] // stage
[Pipeline] }
11:34AM INF > Run case=sql/debezium/unsigned_integer_test.sql
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 210-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 211-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 212-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 213-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4538cc000d	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:28931, start at 2024-05-05 11:34:33.278717031 +0800 CST m=+5.115274847	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:36:33.285 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:34:33.267 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:24:33.267 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4538cc000d	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:28931, start at 2024-05-05 11:34:33.278717031 +0800 CST m=+5.115274847	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:36:33.285 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:34:33.267 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:24:33.267 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4538b00014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:29025, start at 2024-05-05 11:34:33.286767258 +0800 CST m=+5.068348527	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:36:33.293 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:34:33.260 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:24:33.260 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/canal_json_content_compatible/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/canal_json_content_compatible/tiflash/log/error.log
arg matches is ArgMatches { args: {"advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/canal_json_content_compatible/tiflash/db/proxy"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/canal_json_content_compatible/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/canal_json_content_compatible/tiflash-proxy.toml"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 214-th check, retry later
[Sun May  5 11:34:36 CST 2024] <<<<<< START cdc server in canal_json_content_compatible case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_content_compatible.3048630488.out server --log-file /tmp/tidb_cdc_test/canal_json_content_compatible/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/canal_json_content_compatible/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
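The retry loop traced above (curl the /debug/info endpoint with basic auth until the response contains "etcd info", giving up after 50 attempts) is how these scripts wait for the cdc server to become ready. A compact sketch of that loop, assuming the same endpoint, credentials, and markers shown in the trace:

# Readiness-loop sketch based on the trace above (endpoint, user, and markers come from the log).
for ((i = 0; i <= 50; i++)); do
    res=$(curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret)
    if echo "$res" | grep -q 'failed to get info:'; then
        echo "cdc server reported an error" >&2
        exit 1
    fi
    if echo "$res" | grep -q 'etcd info'; then
        break   # server is up and serving owner/processor/etcd info
    fi
    if [ "$i" -eq 50 ]; then
        echo "cdc server did not become ready in time" >&2
        exit 1
    fi
    sleep 3
done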
table test.finish not exists for 215-th check, retry later
table test.finish not exists for 216-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:34:39 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda
	{"id":"aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880076}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d1284bc3
	aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda

/tidb/cdc/default/default/upstream/7365353798631202403
	{"id":7365353798631202403,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda
	{"id":"aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880076}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d1284bc3
	aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda

/tidb/cdc/default/default/upstream/7365353798631202403
	{"id":7365353798631202403,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda
	{"id":"aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880076}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d1284bc3
	aeedd5c4-e8bc-47a6-ac73-b89edfd7fdda

/tidb/cdc/default/default/upstream/7365353798631202403
	{"id":7365353798631202403,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.canal_json_content_compatible.cli.30539.out cli changefeed create '--sink-uri=kafka://127.0.0.1:9092/ticdc-canal-json-content-compatible?protocol=canal-json&enable-tidb-extension=true&content-compatible=true'
Create changefeed successfully!
ID: ca0f9f56-ac29-4254-b3b8-d35f095a60d8
Info: {"upstream_id":7365353798631202403,"namespace":"default","id":"ca0f9f56-ac29-4254-b3b8-d35f095a60d8","sink_uri":"kafka://127.0.0.1:9092/ticdc-canal-json-content-compatible?protocol=canal-json\u0026enable-tidb-extension=true\u0026content-compatible=true","create_time":"2024-05-05T11:34:40.00209909+08:00","start_ts":449545523656654853,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":true,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545523656654853,"checkpoint_ts":449545523656654853,"checkpoint_time":"2024-05-05 11:34:39.867"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table test.finish not exists for 217-th check, retry later
+ set +x
table test.finish not exists for 218-th check, retry later
table test.finish not exists for 219-th check, retry later
[Sun May  5 11:34:46 CST 2024] <<<<<< START kafka consumer in canal_json_content_compatible case >>>>>>
table test.finish not exists for 220-th check, retry later
table test.finish not exists for 221-th check, retry later
table test.finish not exists for 222-th check, retry later
table test.finish not exists for 223-th check, retry later
table test.finish not exists for 224-th check, retry later
table test.finish not exists for 225-th check, retry later
table test.finish not exists for 226-th check, retry later
table test.finish not exists for 227-th check, retry later
table test.finish not exists for 228-th check, retry later
table test.finish not exists for 229-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 230-th check, retry later
table test.finish_mark not exists for 3-th check, retry later
table test.finish not exists for 231-th check, retry later
table test.finish_mark not exists for 4-th check, retry later
table test.finish not exists for 232-th check, retry later
table test.finish_mark not exists for 5-th check, retry later
table test.finish not exists for 233-th check, retry later
table test.finish not exists for 234-th check, retry later
table test.finish_mark not exists for 6-th check, retry later
table test.finish_mark not exists for 7-th check, retry later
table test.finish not exists for 235-th check, retry later
table test.finish_mark not exists for 8-th check, retry later
table test.finish not exists for 236-th check, retry later
table test.finish_mark not exists for 9-th check, retry later
table test.finish not exists for 237-th check, retry later
table test.finish not exists for 238-th check, retry later
table test.finish_mark not exists for 10-th check, retry later
table test.finish not exists for 239-th check, retry later
table test.finish_mark exists
check diff successfully
table test.finish not exists for 240-th check, retry later
table test.finish not exists for 241-th check, retry later
table test.finish not exists for 242-th check, retry later
table test.finish not exists for 243-th check, retry later
11:35AM INF > Run case=sql/dml.sql
table test.finish not exists for 244-th check, retry later
table test.finish_mark not exists for 1-th check, retry later
table test.finish not exists for 245-th check, retry later
table test.finish_mark not exists for 2-th check, retry later
table test.finish not exists for 246-th check, retry later
table test.finish_mark not exists for 3-th check, retry later
table test.finish not exists for 247-th check, retry later
table test.finish_mark not exists for 4-th check, retry later
table test.finish not exists for 248-th check, retry later
table test.finish_mark exists
check diff successfully
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Sun May  5 11:35:45 CST 2024] <<<<<< run test case canal_json_content_compatible success! >>>>>>
table test.finish not exists for 249-th check, retry later
table test.finish not exists for 250-th check, retry later
table test.finish not exists for 251-th check, retry later
table test.finish not exists for 252-th check, retry later
table test.finish not exists for 253-th check, retry later
table test.finish not exists for 254-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics/run.sh using Sink-Type: kafka... <<=================
The 1 times to try to start tidb cluster...
table test.finish not exists for 255-th check, retry later
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   221  100   221    0     0   2545      0 --:--:-- --:--:-- --:--:--  2540
100   221  100   221    0     0   2543      0 --:--:-- --:--:-- --:--:--  2540
+ synced_status='{"synced":true,"sink_checkpoint_ts":"2024-05-05 11:35:44.909","puller_resolved_ts":"2024-05-05 11:35:38.909","last_synced_ts":"2024-05-05 11:33:29.258","now_ts":"2024-05-05 11:35:46.000","info":"Data syncing is finished"}'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:35:44.909","puller_resolved_ts":"2024-05-05' '11:35:38.909","last_synced_ts":"2024-05-05' '11:33:29.258","now_ts":"2024-05-05' '11:35:46.000","info":"Data' syncing is 'finished"}'
++ jq .synced
+ status=true
+ '[' true '!=' true ']'
++ echo '{"synced":true,"sink_checkpoint_ts":"2024-05-05' '11:35:44.909","puller_resolved_ts":"2024-05-05' '11:35:38.909","last_synced_ts":"2024-05-05' '11:33:29.258","now_ts":"2024-05-05' '11:35:46.000","info":"Data' syncing is 'finished"}'
++ jq -r .info
+ info='Data syncing is finished'
+ target_message='Data syncing is finished'
+ '[' 'Data syncing is finished' '!=' 'Data syncing is finished' ']'
+ cleanup_process cdc.test
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ run_case_with_failpoint conf/changefeed-redo.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status_with_redo
+ mkdir -p /tmp/tidb_cdc_test/synced_status_with_redo
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status_with_redo
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
table test.finish not exists for 256-th check, retry later
table test.finish not exists for 257-th check, retry later
start tidb cluster in /tmp/tidb_cdc_test/multi_topics
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
start tidb cluster in /tmp/tidb_cdc_test/synced_status_with_redo
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 258-th check, retry later
table test.finish not exists for 259-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
table test.finish not exists for 260-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 261-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 262-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 263-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b61100010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:31897, start at 2024-05-05 11:36:14.165922713 +0800 CST m=+5.074044255	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.172 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.148 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.148 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b61100010	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:31897, start at 2024-05-05 11:36:14.165922713 +0800 CST m=+5.074044255	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.172 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.148 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.148 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b63480006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-wqs8r-vmgg0, pid:31984, start at 2024-05-05 11:36:14.295723038 +0800 CST m=+5.155141057	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.302 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.290 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.290 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/multi_topics/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/multi_topics/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/multi_topics/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/multi_topics/tiflash/db/proxy"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/multi_topics/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b6acc0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:19172, start at 2024-05-05 11:36:14.813014816 +0800 CST m=+5.051059073	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.820 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.820 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.820 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b6acc0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:19172, start at 2024-05-05 11:36:14.813014816 +0800 CST m=+5.051059073	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.820 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.820 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.820 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4b6cfc000d	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-lf7fd-n125f, pid:19258, start at 2024-05-05 11:36:14.922989616 +0800 CST m=+5.104443558	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:14.932 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:14.911 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:14.911 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/error.log
arg matches is ArgMatches { args: {"log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/log/proxy.log"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash/db/proxy"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status_with_redo/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 264-th check, retry later
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics.cli.33367.out cli tso query --pd=http://127.0.0.1:2379
+ cd /tmp/tidb_cdc_test/synced_status_with_redo
+ export 'GO_FAILPOINTS=github.com/pingcap/tiflow/cdc/owner/ChangefeedOwnerNotUpdateCheckpoint=return(true)'
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ChangefeedOwnerNotUpdateCheckpoint=return(true)'
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.20711.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 265-th check, retry later
+ set +x
+ tso='449545549302464513
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545549302464513 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
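The `tso query` plus `awk '{print $1}'` trace above shows how the scripts capture a start-ts for a new changefeed: the cli prints the TSO on the first line, followed by PASS/coverage lines, and awk keeps only the first field. A minimal sketch under the same assumptions (the `-test.coverprofile` flag used by the real run is omitted here):

# Sketch: capture a start-ts from `cdc cli tso query` (flags taken from the trace above).
tso_output=$(cdc.test cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso_output | awk -F ' ' '{print $1}')   # first field is the TSO; the rest is PASS/coverage noise
echo "using start-ts=$start_ts"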
[Sun May  5 11:36:19 CST 2024] <<<<<< START cdc server in multi_topics case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ GO_FAILPOINTS=
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics.3341233414.out server --log-file /tmp/tidb_cdc_test/multi_topics/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_topics/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
+ set +x
+ tso='449545549478887425
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545549478887425 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545549478887425
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status_with_redo --binary cdc.test
[Sun May  5 11:36:19 CST 2024] <<<<<< START cdc server in synced_status_with_redo case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ (( i = 0 ))
+ (( i <= 50 ))
+ GO_FAILPOINTS='github.com/pingcap/tiflow/cdc/owner/ChangefeedOwnerNotUpdateCheckpoint=return(true)'
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.2074620748.out server --log-file /tmp/tidb_cdc_test/synced_status_with_redo/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status_with_redo/cdc_data --cluster-id default
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
table test.finish not exists for 266-th check, retry later
table test.finish not exists for 267-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:36:22 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/adab61fd-3419-4206-b1d9-6c4f9063730c
	{"id":"adab61fd-3419-4206-b1d9-6c4f9063730c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880179}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2acb2cf
	adab61fd-3419-4206-b1d9-6c4f9063730c

/tidb/cdc/default/default/upstream/7365354225416783616
	{"id":7365354225416783616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/adab61fd-3419-4206-b1d9-6c4f9063730c
	{"id":"adab61fd-3419-4206-b1d9-6c4f9063730c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880179}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2acb2cf
	adab61fd-3419-4206-b1d9-6c4f9063730c

/tidb/cdc/default/default/upstream/7365354225416783616
	{"id":7365354225416783616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/adab61fd-3419-4206-b1d9-6c4f9063730c
	{"id":"adab61fd-3419-4206-b1d9-6c4f9063730c","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880179}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2acb2cf
	adab61fd-3419-4206-b1d9-6c4f9063730c

/tidb/cdc/default/default/upstream/7365354225416783616
	{"id":7365354225416783616,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_topics.cli.33471.out cli changefeed create --start-ts=449545549302464513 '--sink-uri=kafka://127.0.0.1:9092/multi_topics?protocol=canal-json&enable-tidb-extension=true&kafka-version=2.4.1' --config /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/multi_topics/conf/changefeed.toml
Create changefeed successfully!
ID: 0a9f33ea-0731-4999-a8d1-23df4e0db2bb
Info: {"upstream_id":7365354225416783616,"namespace":"default","id":"0a9f33ea-0731-4999-a8d1-23df4e0db2bb","sink_uri":"kafka://127.0.0.1:9092/multi_topics?protocol=canal-json\u0026enable-tidb-extension=true\u0026kafka-version=2.4.1","create_time":"2024-05-05T11:36:22.70889231+08:00","start_ts":449545549302464513,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"protocol":"canal-json","csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"dispatchers":[{"matcher":["workload.*"],"topic":"workload"},{"matcher":["test.*"],"topic":"{schema}_{table}"}],"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545549302464513,"checkpoint_ts":449545549302464513,"checkpoint_time":"2024-05-05 11:36:17.698"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:36:23 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a43c6804-e023-4b99-bbe8-ef551648d4b3
	{"id":"a43c6804-e023-4b99-bbe8-ef551648d4b3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880180}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2af51ce
	a43c6804-e023-4b99-bbe8-ef551648d4b3

/tidb/cdc/default/default/upstream/7365354232683723579
	{"id":7365354232683723579,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a43c6804-e023-4b99-bbe8-ef551648d4b3
	{"id":"a43c6804-e023-4b99-bbe8-ef551648d4b3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880180}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2af51ce
	a43c6804-e023-4b99-bbe8-ef551648d4b3

/tidb/cdc/default/default/upstream/7365354232683723579
	{"id":7365354232683723579,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/a43c6804-e023-4b99-bbe8-ef551648d4b3
	{"id":"a43c6804-e023-4b99-bbe8-ef551648d4b3","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880180}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d2af51ce
	a43c6804-e023-4b99-bbe8-ef551648d4b3

/tidb/cdc/default/default/upstream/7365354232683723579
	{"id":7365354232683723579,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed-redo.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545549478887425 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status_with_redo.cli.20807.out cli changefeed create --start-ts=449545549478887425 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status_with_redo/conf/changefeed-redo.toml
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365354232683723579,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:36:23.540693435+08:00","start_ts":449545549478887425,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"eventual","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"storage":"file:///tmp/tidb_cdc_test/synced_status/redo","use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545549478887425,"checkpoint_ts":449545549478887425,"checkpoint_time":"2024-05-05 11:36:18.371"}
PASS
coverage: 2.5% of statements in github.com/pingcap/tiflow/...
+ set +x
table test.finish not exists for 268-th check, retry later
+ set +x
+ sleep 20
table test.finish not exists for 269-th check, retry later
table test.finish not exists for 270-th check, retry later
11:36AM INF > All tests pass failed=0 passed=219
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   723  100   723    0     0   8860      0 --:--:-- --:--:-- --:--:--  8817
100   723  100   723    0     0   8851      0 --:--:-- --:--:-- --:--:--  8817
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:34:07.131","puller_resolved_ts":"2024-05-05 11:34:07.131","last_synced_ts":"2024-05-05 11:34:00.131","now_ts":"2024-05-05 11:36:17.000","info":"Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' \u003e '\''Resolved-Ts'\'' \u003e '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:34:07.131","puller_resolved_ts":"2024-05-05' '11:34:07.131","last_synced_ts":"2024-05-05' '11:34:00.131","now_ts":"2024-05-05' '11:36:17.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:34:07.131","puller_resolved_ts":"2024-05-05' '11:34:07.131","last_synced_ts":"2024-05-05' '11:34:00.131","now_ts":"2024-05-05' '11:36:17.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq -r .info
+ info='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ target_message='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ '[' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' '!=' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' ']'
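The block above is the recurring synced-status assertion: query the v2 API for the changefeed, extract .synced and .info with jq, and compare them against the expected values. A condensed sketch of that check, with the changefeed id and endpoint as in the trace; the full target_message comparison is omitted here because the expected text is already shown verbatim above:

synced_status=$(curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced)
status=$(echo "$synced_status" | jq .synced)
info=$(echo "$synced_status" | jq -r .info)
if [ "$status" != "false" ]; then
    echo "unexpected synced field: $synced_status"
    exit 1
fi
echo "info message: $info"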
+ cleanup_process cdc.test
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ run_case_with_unavailable_tidb conf/changefeed.toml
+ rm -rf /tmp/tidb_cdc_test/synced_status
+ mkdir -p /tmp/tidb_cdc_test/synced_status
+ start_tidb_cluster --workdir /tmp/tidb_cdc_test/synced_status
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
The 1 times to try to start tidb cluster...
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
shell-init: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
table test.finish not exists for 271-th check, retry later
table test.finish not exists for 272-th check, retry later
chdir: error retrieving current directory: getcwd: cannot access parent directories: No such file or directory
start tidb cluster in /tmp/tidb_cdc_test/synced_status
Starting Upstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Starting Downstream PD...
Release Version: v8.2.0-alpha-14-g1679dbca2
Edition: Community
Git Commit Hash: 1679dbca25b3483d1375c7e747da27e99ad77360
Git Branch: master
UTC Build Time:  2024-04-30 08:09:12
Verifying upstream PD is started...
table test.finish not exists for 273-th check, retry later
table test.finish not exists for 274-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   8.2.0-alpha
Edition:           Community
Git Commit Hash:   72a0fd5b00235a7c56014b77ddd933e2a0d33c88
Git Commit Branch: master
UTC Build Time:    2024-04-30 02:23:51
Rust Version:      rustc 1.77.0-nightly (89e2160c4 2023-12-27)
Enable Features:   memory-engine pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine trace-async-tasks openssl-vendored
Profile:           dist_release
[Sun May  5 11:36:36 CST 2024] <<<<<< run test case debezium success! >>>>>>
table test.finish not exists for 275-th check, retry later
Starting Upstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v8.2.0-alpha-79-g600b2ed4bf
Edition: Community
Git Commit Hash: 600b2ed4bf0aa38224a1c4c4c68831820735515c
Git Branch: master
UTC Build Time: 2024-05-01 02:56:48
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 276-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 277-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table test.finish not exists for 278-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4d2f5c0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:23285, start at 2024-05-05 11:36:43.757466871 +0800 CST m=+5.075736767	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:43.766 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:43.735 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:43.735 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4d2f5c0014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:23285, start at 2024-05-05 11:36:43.757466871 +0800 CST m=+5.075736767	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:43.766 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:43.735 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:43.735 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	196	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63d1b4d30100014	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:pingcap-tiflow-pull-cdc-integration-kafka-test-1855-3nsj2-f79k3, pid:23361, start at 2024-05-05 11:36:43.797653339 +0800 CST m=+5.064382354	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240505-11:38:43.805 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240505-11:36:43.780 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240505-11:26:43.780 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v8.2.0-alpha-16-g8e170090f
Edition:         Community
Git Commit Hash: 8e170090fad91c94bef8d908e21c195c1d145b02
Git Branch:      HEAD
UTC Build Time:  2024-04-30 02:34:21
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO
Compiler:        clang++ 13.0.0

Raft Proxy
Git Commit Hash:   7dc50b4eb06124e31f03adb06c20ff7ab61c5f79
Git Commit Branch: HEAD
UTC Build Time:    2024-04-30 02:38:45
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:   external-jemalloc portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure openssl-vendored
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/synced_status/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/synced_status/tiflash/log/error.log
arg matches is ArgMatches { args: {"pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/log/proxy.log"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash-proxy.toml"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/synced_status/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["8e170090fad91c94bef8d908e21c195c1d145b02"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v8.2.0-alpha-16-g8e170090f"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
table test.finish not exists for 279-th check, retry later
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   723  100   723    0     0   9397      0 --:--:-- --:--:-- --:--:--  9513
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:36:18.371","puller_resolved_ts":"1970-01-01 08:00:00.000","last_synced_ts":"1970-01-01 08:00:00.000","now_ts":"2024-05-05 11:36:45.000","info":"Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' \u003e '\''Resolved-Ts'\'' \u003e '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait"}'
++ jq .synced
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:36:18.371","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:36:45.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:36:18.371","puller_resolved_ts":"1970-01-01' '08:00:00.000","last_synced_ts":"1970-01-01' '08:00:00.000","now_ts":"2024-05-05' '11:36:45.000","info":"Please' check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view ''\''TiKV-Details'\''' '\u003e' ''\''Resolved-Ts'\''' '\u003e' ''\''Max' Leader Resolved TS 'gap'\''' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please 'wait"}'
++ jq -r .info
+ info='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ target_message='Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait'
+ '[' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' '!=' 'Please check whether PD is online and TiKV Regions are all available. If PD is offline or some TiKV regions are not available, it means that the data syncing process is complete. To check whether TiKV regions are all available, you can view '\''TiKV-Details'\'' > '\''Resolved-Ts'\'' > '\''Max Leader Resolved TS gap'\'' on Grafana. If the gap is large, such as a few minutes, it means that some regions in TiKV are unavailable. Otherwise, if the gap is small and PD is online, it means the data syncing is incomplete, so please wait' ']'
+ export GO_FAILPOINTS=
+ GO_FAILPOINTS=
+ cleanup_process cdc.test
wait process cdc.test exit for 1-th time...
wait process cdc.test exit for 2-th time...
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
+ stop_tidb_cluster
+ cd /tmp/tidb_cdc_test/synced_status
++ run_cdc_cli_tso_query 127.0.0.1 2379
+ pd_host=127.0.0.1
+ pd_port=2379
+ is_tls=false
+ '[' false == true ']'
++ run_cdc_cli tso query --pd=http://127.0.0.1:2379
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.24810.out cli tso query --pd=http://127.0.0.1:2379
table test.finish not exists for 280-th check, retry later
+ set +x
+ tso='449545557071888385
PASS
coverage: 1.8% of statements in github.com/pingcap/tiflow/...'
+ echo 449545557071888385 PASS coverage: 1.8% of statements in github.com/pingcap/tiflow/...
+ awk -F ' ' '{print $1}'
+ set +x
+ start_ts=449545557071888385
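run_cdc_cli_tso_query, traced above, wraps cdc cli tso query: the first whitespace-separated token of the output is the TSO used as --start-ts, while the trailing PASS/coverage lines are noise appended by the coverage-instrumented cdc.test binary. A sketch of that extraction (the coverage file path below is only illustrative):

tso=$(cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.cli.out cli tso query --pd=http://127.0.0.1:2379)
start_ts=$(echo $tso | awk -F ' ' '{print $1}')   # unquoted echo flattens the newlines before awk runs
echo "start_ts=$start_ts"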
+ run_cdc_server --workdir /tmp/tidb_cdc_test/synced_status --binary cdc.test
[Sun May  5 11:36:48 CST 2024] <<<<<< START cdc server in synced_status case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ GO_FAILPOINTS=
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL'
+ [[ no != \n\o ]]
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.2484924851.out server --log-file /tmp/tidb_cdc_test/synced_status/cdc.log --log-level debug --data-dir /tmp/tidb_cdc_test/synced_status/cdc_data --cluster-id default
+ (( i = 0 ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8300; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/lossy_ddl/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:36:48 CST 2024] <<<<<< run test case lossy_ddl success! >>>>>>
table test.finish not exists for 281-th check, retry later
=================>> Running test /home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/storage_csv_update/run.sh using Sink-Type: kafka... <<=================
[Sun May  5 11:36:51 CST 2024] <<<<<< run test case storage_csv_update success! >>>>>>
table test.finish not exists for 282-th check, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8300/debug/info --user ticdc:ticdc_secret -vsL
* About to connect() to 127.0.0.1 port 8300 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8300 (#0)
* Server auth using Basic with user 'ticdc'
> GET /debug/info HTTP/1.1
> Authorization: Basic dGljZGM6dGljZGNfc2VjcmV0
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8300
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Sun, 05 May 2024 03:36:51 GMT
< Content-Length: 815
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6d562892-1b3a-4083-a59d-a3a5751c9e3a
	{"id":"6d562892-1b3a-4083-a59d-a3a5751c9e3a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880209}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d320a8ce
	6d562892-1b3a-4083-a59d-a3a5751c9e3a

/tidb/cdc/default/default/upstream/7365354354337617031
	{"id":7365354354337617031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6d562892-1b3a-4083-a59d-a3a5751c9e3a
	{"id":"6d562892-1b3a-4083-a59d-a3a5751c9e3a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880209}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d320a8ce
	6d562892-1b3a-4083-a59d-a3a5751c9e3a

/tidb/cdc/default/default/upstream/7365354354337617031
	{"id":7365354354337617031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** owner info ***:



*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/6d562892-1b3a-4083-a59d-a3a5751c9e3a
	{"id":"6d562892-1b3a-4083-a59d-a3a5751c9e3a","address":"127.0.0.1:8300","version":"v8.2.0-alpha-53-g0de8dc3e4","git-hash":"0de8dc3e43ec741eba58047155ce7f3dba8eb4f7","deploy-path":"/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/bin/cdc.test","start-timestamp":1714880209}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f46d320a8ce
	6d562892-1b3a-4083-a59d-a3a5751c9e3a

/tidb/cdc/default/default/upstream/7365354354337617031
	{"id":7365354354337617031,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ config_path=conf/changefeed.toml
+ SINK_URI='mysql://root@127.0.0.1:3306/?max-txn-row=1'
+ run_cdc_cli changefeed create --start-ts=449545557071888385 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.synced_status.cli.24911.out cli changefeed create --start-ts=449545557071888385 '--sink-uri=mysql://root@127.0.0.1:3306/?max-txn-row=1' --changefeed-id=test-1 --config=/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/conf/changefeed.toml
Create changefeed successfully!
ID: test-1
Info: {"upstream_id":7365354354337617031,"namespace":"default","id":"test-1","sink_uri":"mysql://root@127.0.0.1:3306/?max-txn-row=1","create_time":"2024-05-05T11:36:52.305498738+08:00","start_ts":449545557071888385,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"enable_table_monitor":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64","output_old_value":false,"output_handle_key":false},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"content_compatible":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true,"debezium_disable_schema":false,"debezium":{"output_old_value":true},"open":{"output_old_value":true}},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"synced_status":{"synced_check_interval":120,"checkpoint_interval":20}},"state":"normal","creator_version":"v8.2.0-alpha-53-g0de8dc3e4","resolved_ts":449545557071888385,"checkpoint_ts":449545557071888385,"checkpoint_time":"2024-05-05 11:36:47.336"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
+ set +x
+ run_sql 'USE TEST;Create table t1(a int primary key, b int);insert into t1 values(1,2);insert into t1 values(2,3);'
+ check_table_exists test.t1 127.0.0.1 3306
table test.t1 not exists for 1-th check, retry later
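The repeating "table ... not exists for N-th check, retry later" lines come from check_table_exists, which polls the downstream TiDB until the replicated table appears. A minimal sketch of that polling pattern, assuming a 60-attempt limit and a 2-second interval (the real helper in the tiflow test utilities may use different numbers):

check_table_exists() {
    local table=$1 host=$2 port=$3
    local i
    for i in $(seq 1 60); do
        if mysql -h "$host" -P "$port" -u root -e "DESC $table" >/dev/null 2>&1; then
            echo "table $table exists"
            return 0
        fi
        echo "table $table not exists for $i-th check, retry later"
        sleep 2
    done
    echo "table $table not exists at last check"
    return 1
}
check_table_exists test.t1 127.0.0.1 3306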
table test.finish not exists for 283-th check, retry later
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
table test.finish not exists for 284-th check, retry later
table test.t1 exists
+ sleep 5
+ check_logs /tmp/tidb_cdc_test/synced_status_with_redo
++ date
+ echo '[Sun May  5 11:36:56 CST 2024] <<<<<< run test case synced_status_with_redo success! >>>>>>'
[Sun May  5 11:36:56 CST 2024] <<<<<< run test case synced_status_with_redo success! >>>>>>
+ stop_tidb_cluster
table test.finish not exists for 285-th check, retry later
<<< Run all test success >>>
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-pull_cdc_integration_kafka_test-1855/tiflow-cdc already exists)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
table test.finish not exists for 286-th check, retry later
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
+ kill_tidb
++ ps aux
++ grep tidb-server
++ grep /tmp/tidb_cdc_test/synced_status
+ info='jenkins    23285 13.5  0.0 2730440 252812 ?      Sl   11:36   0:02 tidb-server -P 4000 -config /tmp/tidb_cdc_test/synced_status/tidb-config-1714880198675340784.toml --store tikv --path 127.0.0.1:2379 --status=10080 --log-file /tmp/tidb_cdc_test/synced_status/tidb.log
jenkins    23289  3.7  0.0 2416748 192152 ?      Sl   11:36   0:00 tidb-server -P 4001 -config /tmp/tidb_cdc_test/synced_status/tidb-config-1714880198678252366.toml --store tikv --path 127.0.0.1:2379 --status=10081 --log-file /tmp/tidb_cdc_test/synced_status/tidb_other.log
jenkins    23361 13.5  0.0 2634316 275292 ?      Sl   11:36   0:02 tidb-server -P 3306 -config /tmp/tidb_cdc_test/synced_status/tidb-config-1714880198726039768.toml --store tikv --path 127.0.0.1:2479 --status=20080 --log-file /tmp/tidb_cdc_test/synced_status/tidb_down.log'
++ ps aux
++ grep tidb-server
++ grep /tmp/tidb_cdc_test/synced_status
++ awk '{print $2}'
++ xargs kill -9
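kill_tidb, traced above, selects the tidb-server processes whose command lines reference this case's workdir and force-kills them, so the synced-status API can then be observed with the upstream unavailable. A sketch mirroring that pipeline, with the workdir taken from the trace:

workdir=/tmp/tidb_cdc_test/synced_status
info=$(ps aux | grep tidb-server | grep "$workdir")
echo "$info"                                              # record what is about to be killed
ps aux | grep tidb-server | grep "$workdir" | awk '{print $2}' | xargs kill -9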
++ curl -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced
  % Total    % Received % Xferd  Average Speed   Time    Time     Time  Current
                                 Dload  Upload   Total   Spent    Left  Speed

  0     0    0     0    0     0      0      0 --:--:-- --:--:-- --:--:--     0
100   243  100   243    0     0   5519      0 --:--:-- --:--:-- --:--:--  5651
+ synced_status='{"synced":false,"sink_checkpoint_ts":"2024-05-05 11:36:59.736","puller_resolved_ts":"2024-05-05 11:36:53.736","last_synced_ts":"2024-05-05 11:36:53.835","now_ts":"2024-05-05 11:37:00.000","info":"The data syncing is not finished, please wait"}'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:36:59.736","puller_resolved_ts":"2024-05-05' '11:36:53.736","last_synced_ts":"2024-05-05' '11:36:53.835","now_ts":"2024-05-05' '11:37:00.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq .synced
+ status=false
+ '[' false '!=' false ']'
++ echo '{"synced":false,"sink_checkpoint_ts":"2024-05-05' '11:36:59.736","puller_resolved_ts":"2024-05-05' '11:36:53.736","last_synced_ts":"2024-05-05' '11:36:53.835","now_ts":"2024-05-05' '11:37:00.000","info":"The' data syncing is not finished, please 'wait"}'
++ jq -r .info
+ info='The data syncing is not finished, please wait'
+ target_message='The data syncing is not finished, please wait'
+ '[' 'The data syncing is not finished, please wait' '!=' 'The data syncing is not finished, please wait' ']'
+ sleep 130
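The 130-second sleep is presumably chosen to exceed the changefeed's synced_check_interval (120 s in the config echoed earlier), so that once no new commits have arrived for that long the next poll can report synced=true; a guess at the follow-up query issued after waking up:

sleep 130
curl -s -X GET http://127.0.0.1:8300/api/v2/changefeeds/test-1/synced | jq '{synced, info}'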
table test.finish not exists for 287-th check, retry later
table test.finish not exists for 288-th check, retry later
table test.finish not exists for 289-th check, retry later
table test.finish not exists for 290-th check, retry later
table test.finish not exists for 291-th check, retry later
table test.finish not exists for 292-th check, retry later
table test.finish not exists for 293-th check, retry later
table test.finish not exists for 294-th check, retry later
table test.finish not exists for 295-th check, retry later
table test.finish not exists for 296-th check, retry later
table test.finish not exists for 297-th check, retry later
table test.finish not exists for 298-th check, retry later
table test.finish not exists for 299-th check, retry later
table test.finish not exists for 300-th check, retry later
table test.finish not exists at last check
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
Post stage
[Pipeline] sh
+ ls /tmp/tidb_cdc_test/
consistent_partition_table
consistent_replicate_ddl
consistent_replicate_gbk
consistent_replicate_storage_file
consistent_replicate_storage_file_large_value
consistent_replicate_storage_s3
cov.kafka_big_messages_v2.36373639.out
cov.multi_tables_ddl_v2.74627464.out
cov.multi_topics_v2.cli.10519.out
cov.multi_topics_v2.cli.10614.out
kafka_big_messages
kafka_big_messages_v2
multi_tables_ddl
multi_tables_ddl_v2
multi_topics
multi_topics_v2
sql_res.kafka_big_messages_v2.txt
sql_res.multi_tables_ddl_v2.txt
sql_res.multi_topics_v2.txt
++ find /tmp/tidb_cdc_test/ -type f -name '*.log'
+ tar -cvzf log-G02.tar.gz /tmp/tidb_cdc_test/multi_topics/output/sync_diff.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_kafka_consumer.log /tmp/tidb_cdc_test/multi_topics_v2/tikv2/db/000005.log /tmp/tidb_cdc_test/multi_topics_v2/sync_diff_inspector.log /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log /tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log /tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy/db/000005.log /tmp/tidb_cdc_test/multi_topics_v2/tidb_other.log /tmp/tidb_cdc_test/multi_topics_v2/tidb.log /tmp/tidb_cdc_test/multi_topics_v2/tidb-slow.log /tmp/tidb_cdc_test/multi_topics_v2/pd1.log /tmp/tidb_cdc_test/multi_topics_v2/down_pd.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0002/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0005/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0006/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0004/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0003/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0001/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0007/000002.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0000/000002.log /tmp/tidb_cdc_test/multi_topics_v2/tikv1/db/000005.log /tmp/tidb_cdc_test/multi_topics_v2/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/multi_topics_v2/tikv3/db/000005.log /tmp/tidb_cdc_test/multi_topics_v2/tikv_down.log /tmp/tidb_cdc_test/multi_topics_v2/tidb_down.log /tmp/tidb_cdc_test/multi_topics_v2/down_pd/region-meta/000001.log /tmp/tidb_cdc_test/multi_topics_v2/down_pd/hot-region/000001.log /tmp/tidb_cdc_test/multi_topics_v2/tikv3.log /tmp/tidb_cdc_test/multi_topics_v2/tikv2.log /tmp/tidb_cdc_test/multi_topics_v2/stdout.log /tmp/tidb_cdc_test/multi_topics_v2/pd1/region-meta/000001.log /tmp/tidb_cdc_test/multi_topics_v2/pd1/hot-region/000001.log /tmp/tidb_cdc_test/multi_topics_v2/tikv_down/db/000005.log /tmp/tidb_cdc_test/multi_topics_v2/cdc.log /tmp/tidb_cdc_test/multi_topics_v2/tikv1.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_kafka_consumer.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/sync_diff_inspector.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb_other.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb-slow.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/pd1.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/down_pd.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv_down.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb_down.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv3.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv2.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/stdout.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log /tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv1.log /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_kafka_consumer.log /tmp/tidb_cdc_test/kafka_big_messages_v2/sync_diff_inspector.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tidb_other.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tidb.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tidb-slow.log /tmp/tidb_cdc_test/kafka_big_messages_v2/pd1.log /tmp/tidb_cdc_test/kafka_big_messages_v2/down_pd.log /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_kafka_consumer_stdout.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tikv_down.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tidb_down.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tikv3.log 
/tmp/tidb_cdc_test/kafka_big_messages_v2/tikv2.log /tmp/tidb_cdc_test/kafka_big_messages_v2/stdout.log /tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log /tmp/tidb_cdc_test/kafka_big_messages_v2/tikv1.log
tar: Removing leading `/' from member names
/tmp/tidb_cdc_test/multi_topics/output/sync_diff.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_kafka_consumer.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv2/db/000005.log
/tmp/tidb_cdc_test/multi_topics_v2/sync_diff_inspector.log
/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/server.log
/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/error.log
/tmp/tidb_cdc_test/multi_topics_v2/tiflash/log/proxy.log
/tmp/tidb_cdc_test/multi_topics_v2/tiflash/db/proxy/db/000005.log
/tmp/tidb_cdc_test/multi_topics_v2/tidb_other.log
/tmp/tidb_cdc_test/multi_topics_v2/tidb.log
/tmp/tidb_cdc_test/multi_topics_v2/tidb-slow.log
/tmp/tidb_cdc_test/multi_topics_v2/pd1.log
/tmp/tidb_cdc_test/multi_topics_v2/down_pd.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0002/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0005/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0006/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0004/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0003/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0001/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0007/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_data/tmp/sorter/0000/000002.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv1/db/000005.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc_kafka_consumer_stdout.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv3/db/000005.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv_down.log
/tmp/tidb_cdc_test/multi_topics_v2/tidb_down.log
/tmp/tidb_cdc_test/multi_topics_v2/down_pd/region-meta/000001.log
/tmp/tidb_cdc_test/multi_topics_v2/down_pd/hot-region/000001.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv3.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv2.log
/tmp/tidb_cdc_test/multi_topics_v2/stdout.log
/tmp/tidb_cdc_test/multi_topics_v2/pd1/region-meta/000001.log
/tmp/tidb_cdc_test/multi_topics_v2/pd1/hot-region/000001.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv_down/db/000005.log
/tmp/tidb_cdc_test/multi_topics_v2/cdc.log
/tmp/tidb_cdc_test/multi_topics_v2/tikv1.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_kafka_consumer.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/sync_diff_inspector.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb_other.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb-slow.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/pd1.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/down_pd.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc_kafka_consumer_stdout.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv_down.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tidb_down.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv3.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv2.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/stdout.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/cdc.log
/tmp/tidb_cdc_test/multi_tables_ddl_v2/tikv1.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_kafka_consumer.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/sync_diff_inspector.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tidb_other.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tidb.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tidb-slow.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/pd1.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/down_pd.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/cdc_kafka_consumer_stdout.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tikv_down.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tidb_down.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tikv3.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tikv2.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/stdout.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/cdc.log
/tmp/tidb_cdc_test/kafka_big_messages_v2/tikv1.log
+ ls -alh log-G02.tar.gz
-rw-r--r-- 1 jenkins jenkins 15M May  5 11:37 log-G02.tar.gz
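The post stage gathers every *.log file under /tmp/tidb_cdc_test/ into a single archive for archiveArtifacts; the pipeline does this by expanding the find output onto one long tar command line, as shown above. An equivalent that streams NUL-separated names into GNU tar instead of building a huge argument list (a rewrite for readability, not the pipeline's exact command):

cd /tmp/tidb_cdc_test/
find . -type f -name '*.log' -print0 | tar --null -czvf log-G02.tar.gz -T -
ls -alh log-G02.tar.gz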
[Pipeline] archiveArtifacts
Archiving artifacts
Recording fingerprints
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G02'
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
{"level":"warn","ts":1714880278.0637343,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0016f9500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
{"level":"warn","ts":1714880280.0642738,"caller":"v3@v3.5.12/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0016f9500/127.0.0.1:2379","attempt":1,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
script returned exit code 143
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G01'
++ stop_tidb_cluster
/home/jenkins/agent/workspace/pingcap/tiflow/pull_cdc_integration_kafka_test/tiflow/tests/integration_tests/synced_status/run.sh: line 1: 24988 Terminated              sleep 130
script returned exit code 143
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G09'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 1
Finished: FAILURE