Console Output

Started by user Jenkins Admin
Obtained pipelines/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test.groovy from git https://github.com/PingCAP-QE/ci.git
Loading library tipipeline@main
Library tipipeline@main is cached. Copying from home.
[Pipeline] Start of Pipeline
[Pipeline] readJSON
[Pipeline] readTrusted
Obtained pipelines/pingcap/tiflow/release-7.5/pod-pull_cdc_integration_kafka_test.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tiflow/ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2
Agent ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2 is provisioned from template ap_tiflow_release-7_5_pull_cdc_integration_kafka_test_543-99slp-x8s7q
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/"
    runUrl: "job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "d2bf08955f8b018a2952a0a362ee7e0b0f17bc7f"
    jenkins/label: "ap_tiflow_release-7_5_pull_cdc_integration_kafka_test_543-99slp"
  name: "ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2"
  namespace: "jenkins-tiflow"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
  containers:
  - image: "wurstmeister/zookeeper"
    imagePullPolicy: "IfNotPresent"
    name: "zookeeper"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - args:
    - "cat"
    image: "hub.pingcap.net/jenkins/golang-tini:1.21"
    imagePullPolicy: "Always"
    name: "golang"
    resources:
      limits:
        cpu: "4"
        memory: "16Gi"
      requests:
        cpu: "2"
        memory: "12Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_CREATE_TOPICS"
      value: "big-message-test:1:1"
    - name: "KAFKA_BROKER_ID"
      value: "1"
    - name: "KAFKA_SSL_KEYSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_ZOOKEEPER_CONNECT"
      value: "localhost:2181"
    - name: "KAFKA_MESSAGE_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_REPLICA_FETCH_MAX_BYTES"
      value: "11534336"
    - name: "KAFKA_ADVERTISED_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "ZK"
      value: "zk"
    - name: "KAFKA_SSL_KEYSTORE_LOCATION"
      value: "/tmp/kafka.server.keystore.jks"
    - name: "KAFKA_SSL_KEY_PASSWORD"
      value: "test1234"
    - name: "KAFKA_SSL_TRUSTSTORE_PASSWORD"
      value: "test1234"
    - name: "KAFKA_LISTENERS"
      value: "SSL://127.0.0.1:9093,PLAINTEXT://127.0.0.1:9092"
    - name: "KAFKA_SSL_TRUSTSTORE_LOCATION"
      value: "/tmp/kafka.server.truststore.jks"
    - name: "RACK_COMMAND"
      value: "curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.keystore.jks\
        \ -o /tmp/kafka.server.keystore.jks && curl -sfL https://github.com/pingcap/tiflow/raw/6e62afcfecc4e3965d8818784327d4bf2600d9fa/tests/_certificates/kafka.server.truststore.jks\
        \ -o /tmp/kafka.server.truststore.jks"
    image: "wurstmeister/kafka:2.12-2.4.1"
    imagePullPolicy: "IfNotPresent"
    name: "kafka"
    resources:
      limits:
        cpu: "2000m"
        memory: "4Gi"
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "KAFKA_SERVER"
      value: "127.0.0.1:9092"
    - name: "ZOOKEEPER_SERVER"
      value: "127.0.0.1:2181"
    - name: "DOWNSTREAM_DB_HOST"
      value: "127.0.0.1"
    - name: "USE_FLAT_MESSAGE"
      value: "true"
    - name: "DOWNSTREAM_DB_PORT"
      value: "3306"
    - name: "DB_NAME"
      value: "test"
    image: "rustinliu/ticdc-canal-json-adapter:latest"
    imagePullPolicy: "IfNotPresent"
    name: "canal-adapter"
    resources:
      requests:
        cpu: "200m"
        memory: "4Gi"
    tty: true
    volumeMounts:
    - mountPath: "/tmp"
      name: "volume-0"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      limits:
        memory: "256Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2"
    - name: "JENKINS_NAME"
      value: "ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "volume-0"
  - emptyDir:
      medium: ""
    name: "workspace-volume"

Running on ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2 in /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test
[Pipeline] {
[Pipeline] stage
[Pipeline] { (Declarative: Checkout SCM)
[Pipeline] checkout
The recommended git tool is: git
No credentials specified
Cloning the remote Git repository
Using shallow clone with depth 1
Cloning repository https://github.com/PingCAP-QE/ci.git
 > git init /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test # timeout=10
Fetching upstream changes from https://github.com/PingCAP-QE/ci.git
 > git --version # timeout=10
 > git --version # 'git version 2.39.2'
 > git fetch --tags --force --progress --depth=1 -- https://github.com/PingCAP-QE/ci.git +refs/heads/*:refs/remotes/origin/* # timeout=5
Avoid second fetch
Checking out Revision 8c60e558a9356c5a25645eebaade802e946e45b6 (origin/main)
Commit message: "feat: disable old pipeline trigger on tiflash master (#2944)"
 > git config remote.origin.url https://github.com/PingCAP-QE/ci.git # timeout=10
 > git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/* # timeout=10
 > git rev-parse origin/main^{commit} # timeout=10
 > git config core.sparsecheckout # timeout=10
 > git checkout -f 8c60e558a9356c5a25645eebaade802e946e45b6 # timeout=10
 > git rev-list --no-walk 8c60e558a9356c5a25645eebaade802e946e45b6 # timeout=10
[Pipeline] }
[Pipeline] // stage
[Pipeline] withEnv
[Pipeline] {
[Pipeline] container
[Pipeline] {
[Pipeline] withEnv
[Pipeline] {
[Pipeline] timeout
Timeout set to expire in 1 hr 5 min
[Pipeline] {
[Pipeline] stage
[Pipeline] { (Debug info)
[Pipeline] sh
+ printenv
PROW_JOB_ID=8aaac853-9c62-4b3f-975c-6ada541b547f
JENKINS_NODE_COOKIE=3e6725a2-671b-4e1b-9db0-30178f67d19e
BUILD_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/
GOLANG_VERSION=1.21.0
HOSTNAME=ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2
HUDSON_SERVER_COOKIE=83ef27fe9acccc92
KUBERNETES_PORT_443_TCP_PORT=443
KUBERNETES_PORT=tcp://10.233.0.1:443
TERM=xterm
STAGE_NAME=Debug info
BUILD_TAG=jenkins-pingcap-tiflow-release-7.5-pull_cdc_integration_kafka_test-543
KUBERNETES_SERVICE_PORT=443
GIT_PREVIOUS_COMMIT=8c60e558a9356c5a25645eebaade802e946e45b6
JOB_SPEC={"type":"presubmit","job":"pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test","buildid":"1784761177889312769","prowjobid":"8aaac853-9c62-4b3f-975c-6ada541b547f","refs":{"org":"pingcap","repo":"tiflow","repo_link":"https://github.com/pingcap/tiflow","base_ref":"release-7.5","base_sha":"fd3a6e756d0d488c7e60bedb05e21e42320c3dc7","base_link":"https://github.com/pingcap/tiflow/commit/fd3a6e756d0d488c7e60bedb05e21e42320c3dc7","pulls":[{"number":10992,"author":"ti-chi-bot","sha":"0b894c989edc3b2da4543993b9e55c517d9f0114","title":"*: bump pd version to 7179657 (#10881)","link":"https://github.com/pingcap/tiflow/pull/10992","commit_link":"https://github.com/pingcap/tiflow/pull/10992/commits/0b894c989edc3b2da4543993b9e55c517d9f0114","author_link":"https://github.com/ti-chi-bot"}]}}
KUBERNETES_SERVICE_HOST=10.233.0.1
WORKSPACE=/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test
JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/
RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/display/redirect?page=changes
RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/display/redirect?page=artifacts
FILE_SERVER_URL=http://fileserver.pingcap.net
JENKINS_HOME=/var/jenkins_home
GIT_COMMIT=8c60e558a9356c5a25645eebaade802e946e45b6
PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin
RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/display/redirect
GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct
_=/usr/bin/printenv
POD_CONTAINER=golang
PWD=/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test
HUDSON_URL=https://do.pingcap.net/jenkins/
JOB_NAME=pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test
TZ=Asia/Shanghai
BUILD_DISPLAY_NAME=#543
JENKINS_URL=https://do.pingcap.net/jenkins/
BUILD_ID=1784761177889312769
GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742
JOB_BASE_NAME=pull_cdc_integration_kafka_test
GIT_PREVIOUS_SUCCESSFUL_COMMIT=8c60e558a9356c5a25645eebaade802e946e45b6
RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/543/display/redirect?page=tests
SHLVL=3
HOME=/home/jenkins
POD_LABEL=ap_tiflow_release-7_5_pull_cdc_integration_kafka_test_543-99slp
GOROOT=/usr/local/go
GIT_BRANCH=origin/main
KUBERNETES_PORT_443_TCP_PROTO=tcp
TINI_VERSION=v0.19.0
CI=true
KUBERNETES_SERVICE_PORT_HTTPS=443
WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test@tmp
EXECUTOR_NUMBER=0
JENKINS_SERVER_COOKIE=durable-a1283f80f8ebe563bfe9f2bd09649a5cc54c1f9b857850fc6066be7891f3e273
NODE_LABELS=ap_tiflow_release-7_5_pull_cdc_integration_kafka_test_543-99slp ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2
GIT_URL=https://github.com/PingCAP-QE/ci.git
HUDSON_HOME=/var/jenkins_home
CLASSPATH=
NODE_NAME=ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2
GOPATH=/go
JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tiflow/job/release-7.5/job/pull_cdc_integration_kafka_test/display/redirect
BUILD_NUMBER=543
KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1
KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443
GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz
+ echo -------------------------
-------------------------
+ go env
GO111MODULE=''
GOARCH='amd64'
GOBIN=''
GOCACHE='/home/jenkins/.cache/go-build'
GOENV='/home/jenkins/.config/go/env'
GOEXE=''
GOEXPERIMENT=''
GOFLAGS=''
GOHOSTARCH='amd64'
GOHOSTOS='linux'
GOINSECURE=''
GOMODCACHE='/go/pkg/mod'
GONOPROXY=''
GONOSUMDB=''
GOOS='linux'
GOPATH='/go'
GOPRIVATE=''
GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct'
GOROOT='/usr/local/go'
GOSUMDB='sum.golang.org'
GOTMPDIR=''
GOTOOLCHAIN='auto'
GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64'
GOVCS=''
GOVERSION='go1.21.0'
GCCGO='gccgo'
GOAMD64='v1'
AR='ar'
CC='gcc'
CXX='g++'
CGO_ENABLED='1'
GOMOD='/dev/null'
GOWORK=''
CGO_CFLAGS='-O2 -g'
CGO_CPPFLAGS=''
CGO_CXXFLAGS='-O2 -g'
CGO_FFLAGS='-O2 -g'
CGO_LDFLAGS='-O2 -g'
PKG_CONFIG='pkg-config'
GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build35395030=/tmp/go-build -gno-record-gcc-switches'
+ echo -------------------------
-------------------------
+ echo 'debug command: kubectl -n jenkins-tiflow exec -ti ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2 bash'
debug command: kubectl -n jenkins-tiflow exec -ti ap-tiflow-release-7-5-pull-cdc-integration-kafka-test-543-z3st2 bash
[Pipeline] container
[Pipeline] {
[Pipeline] sh
+ dig github.com

; <<>> DiG 9.18.16 <<>> github.com
;; global options: +cmd
;; Got answer:
;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 21154
;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1

;; OPT PSEUDOSECTION:
; EDNS: version: 0, flags:; udp: 1232
; COOKIE: 644a5b08f2358914 (echoed)
;; QUESTION SECTION:
;github.com.			IN	A

;; ANSWER SECTION:
github.com.		20	IN	A	20.205.243.166

;; Query time: 0 msec
;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP)
;; WHEN: Mon Apr 29 01:47:10 UTC 2024
;; MSG SIZE  rcvd: 77

[Pipeline] script
[Pipeline] {
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Check diff files)
[Pipeline] container
[Pipeline] {
[Pipeline] script
[Pipeline] {
[Pipeline] withCredentials
Masking supported pattern matches of $token
[Pipeline] {
[Pipeline] httpRequest
Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure.
		 Affected argument(s) used the following variable(s): [token]
		 See https://jenkins.io/redirect/groovy-string-interpolation for details.
HttpMethod: GET
URL: https://api.github.com/repos/pingcap/tiflow/pulls/10992/files?page=1&per_page=100
Content-Type: application/json
Authorization: *****
Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10992/files?page=1&per_page=100
Response Code: HTTP/1.1 200 OK
Success: Status code 200 is in the accepted range: 100:399
[Pipeline] httpRequest
Warning: A secret was passed to "httpRequest" using Groovy String interpolation, which is insecure.
		 Affected argument(s) used the following variable(s): [token]
		 See https://jenkins.io/redirect/groovy-string-interpolation for details.
HttpMethod: GET
URL: https://api.github.com/repos/pingcap/tiflow/pulls/10992/files?page=2&per_page=100
Content-Type: application/json
Authorization: *****
Sending request to url: https://api.github.com/repos/pingcap/tiflow/pulls/10992/files?page=2&per_page=100
Response Code: HTTP/1.1 200 OK
Success: Status code 200 is in the accepted range: 100:399
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] echo
pr_diff_files: [cmd/oauth2-server/main.go, go.mod, go.sum]
[Pipeline] echo
diff file not matched: cmd/oauth2-server/main.go
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Checkout)
[Pipeline] timeout
Timeout set to expire in 10 min
[Pipeline] {
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test/tiflow
[Pipeline] {
[Pipeline] cache
Cache restored successfully (git/pingcap/tiflow/rev-ba6db07-ad8f60f)
203349504 bytes in 1.78 secs (114058750 bytes/sec)
[Pipeline] {
[Pipeline] retry
[Pipeline] {
[Pipeline] script
[Pipeline] {
[Pipeline] sh
git version 2.36.6
Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test/tiflow/.git/
.git
HEAD is now at ad8f60fd4 enbale cli_with_auth
POST git-upload-pack (686 bytes)
POST git-upload-pack (973 bytes)
From https://github.com/pingcap/tiflow
   47417d775..fd3a6e756  release-7.5          -> origin/release-7.5
 = [up to date]          refs/pull/10992/head -> origin/pr/10992/head
Previous HEAD position was ad8f60fd4 enbale cli_with_auth
HEAD is now at fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
🚧 Checking out base SHA: fd3a6e756d0d488c7e60bedb05e21e42320c3dc7...
HEAD is now at fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
✅ Checked. 🎉
🧾 HEAD info:
fd3a6e756d0d488c7e60bedb05e21e42320c3dc7
fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
47417d775 mounter(ticdc): calculate row level checksum for timestmap by using UTC time zone (#10564) (#10646)
3ab47635f *: release-7.5 bump tidb dependencies (#10970)
🚧 Pre-merge heads of pull requests to base SHA: fd3a6e756d0d488c7e60bedb05e21e42320c3dc7 ...
Auto-merging go.mod
CONFLICT (content): Merge conflict in go.mod
Auto-merging go.sum
CONFLICT (content): Merge conflict in go.sum
Automatic merge failed; fix conflicts and then commit the result.
[Pipeline] }
[Pipeline] // script
[Pipeline] }
ERROR: script returned exit code 1
Retrying
[Pipeline] {
[Pipeline] script
[Pipeline] {
[Pipeline] sh
git version 2.36.6
Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_kafka_test/tiflow/.git/
.git
HEAD is now at fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
POST git-upload-pack (686 bytes)
From https://github.com/pingcap/tiflow
 = [up to date]          release-7.5          -> origin/release-7.5
 = [up to date]          refs/pull/10992/head -> origin/pr/10992/head
HEAD is now at fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
🚧 Checking out base SHA: fd3a6e756d0d488c7e60bedb05e21e42320c3dc7...
HEAD is now at fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
✅ Checked. 🎉
🧾 HEAD info:
fd3a6e756d0d488c7e60bedb05e21e42320c3dc7
fd3a6e756 * (ticdc): manually cherry-pick simple protocol to release-7.5 (#10898)
47417d775 mounter(ticdc): calculate row level checksum for timestmap by using UTC time zone (#10564) (#10646)
3ab47635f *: release-7.5 bump tidb dependencies (#10970)
🚧 Pre-merge heads of pull requests to base SHA: fd3a6e756d0d488c7e60bedb05e21e42320c3dc7 ...
Auto-merging go.mod
CONFLICT (content): Merge conflict in go.mod
Auto-merging go.sum
CONFLICT (content): Merge conflict in go.sum
Automatic merge failed; fix conflicts and then commit the result.
[Pipeline] }
[Pipeline] // script
[Pipeline] }
[Pipeline] // retry
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // stage
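The Checkout stage above failed because pre-merging the head of PR #10992 onto base SHA fd3a6e756d0d488c7e60bedb05e21e42320c3dc7 produced content conflicts in go.mod and go.sum, on both the initial attempt and the retry. A minimal sketch for reproducing the conflict locally, assuming git is installed and the refs shown in this log are still reachable (these commands are illustrative, not the exact script the pipeline runs):

  # clone the release branch, then fetch the PR head referenced in this log
  git clone --branch release-7.5 https://github.com/pingcap/tiflow.git && cd tiflow
  git fetch origin pull/10992/head
  # pin the same base SHA the job checked out, then attempt the pre-merge
  git checkout fd3a6e756d0d488c7e60bedb05e21e42320c3dc7
  git merge --no-edit FETCH_HEAD   # expected to stop with: CONFLICT (content) in go.mod and go.sum

Resolving the conflicts in the PR (typically by regenerating go.mod/go.sum against the release-7.5 dependencies and running go mod tidy) allows the pre-merge, and therefore this job, to proceed.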
[Pipeline] stage
[Pipeline] { (prepare)
Stage "prepare" skipped due to earlier failure(s)
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Tests)
Stage "Tests" skipped due to earlier failure(s)
[Pipeline] parallel
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G00')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G01')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G02')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G03')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G04')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G05')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G06')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G07')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G08')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G09')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G10')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G11')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G12')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G13')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G14')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G15')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G16')
[Pipeline] { (Branch: Matrix - TEST_GROUP = 'G17')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G00')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G01')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G02')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G03')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G04')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G05')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G06')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G07')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G08')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G09')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G10')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G11')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G12')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G13')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G14')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G15')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G16')
[Pipeline] stage
[Pipeline] { (Matrix - TEST_GROUP = 'G17')
Stage "Matrix - TEST_GROUP = 'G00'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G01'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G02'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G03'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G04'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G05'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G06'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G07'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G08'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G09'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G10'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G11'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G12'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G13'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G14'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G15'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G16'" skipped due to earlier failure(s)
Stage "Matrix - TEST_GROUP = 'G17'" skipped due to earlier failure(s)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
[Pipeline] stage
[Pipeline] { (Test)
Stage "Matrix - TEST_GROUP = 'G00'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G01'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G02'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G03'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G04'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G05'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G06'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G07'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G08'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G09'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G10'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G11'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G12'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G13'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G14'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G15'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G16'" skipped due to earlier failure(s)
[Pipeline] }
Stage "Matrix - TEST_GROUP = 'G17'" skipped due to earlier failure(s)
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G00'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G01'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G02'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G03'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G04'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G05'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G06'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G09'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G10'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G12'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G13'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G14'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G15'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G16'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G17'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 1
Finished: FAILURE