Started by user Jenkins Admin
Obtained pipelines/pingcap/tidb/latest/ghpr_check2.groovy from git https://github.com/PingCAP-QE/ci.git
Loading library tipipeline@main
Library tipipeline@main is cached. Copying from home.
[Pipeline] Start of Pipeline
[Pipeline] readJSON
[Pipeline] readTrusted
Obtained pipelines/pingcap/tidb/latest/pod-ghpr_check2.yaml from git https://github.com/PingCAP-QE/ci.git
[Pipeline] podTemplate
[Pipeline] {
[Pipeline] node
Created Pod: kubernetes jenkins-tidb/pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125
Agent pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125 is provisioned from template pingcap_tidb_ghpr_check2_10517-6d999-9w2bt
---
apiVersion: "v1"
kind: "Pod"
metadata:
  annotations:
    buildUrl: "http://jenkins.apps.svc.cluster.local:8080/jenkins/job/pingcap/job/tidb/job/ghpr_check2/10517/"
    runUrl: "job/pingcap/job/tidb/job/ghpr_check2/10517/"
  labels:
    jenkins/jenkins-jenkins-agent: "true"
    jenkins/label-digest: "5e126fdcf62825e0db02ef4cf2840795d6f3956c"
    jenkins/label: "pingcap_tidb_ghpr_check2_10517-6d999"
  name: "pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125"
  namespace: "jenkins-tidb"
spec:
  affinity:
    nodeAffinity:
      requiredDuringSchedulingIgnoredDuringExecution:
        nodeSelectorTerms:
        - matchExpressions:
          - key: "kubernetes.io/arch"
            operator: "In"
            values:
            - "amd64"
          - key: "ci-nvme-high-performance"
            operator: "In"
            values:
            - "true"
  containers:
  - image: "hub.pingcap.net/wangweizhen/tidb_image:go12120230809"
    lifecycle:
      postStart:
        exec:
          command:
          - "/bin/sh"
          - "/data/bazel-prepare-in-container.sh"
    name: "golang"
    resources:
      limits:
        memory: "32Gi"
        cpu: "8"
    securityContext:
      privileged: true
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/.tidb/tmp"
      name: "bazel-out-merged"
    - mountPath: "/bazel-out-lower"
      name: "bazel-out-lower"
      subPath: "tidb/go1.19.2"
    - mountPath: "/bazel-out-overlay"
      name: "bazel-out-overlay"
    - mountPath: "/share/.cache/go-build"
      name: "gocache"
    - mountPath: "/share/.go"
      name: "gopathcache"
    - mountPath: "/share/.cache/bazel-repository-cache"
      name: "bazel-repository-cache"
    - mountPath: "/data/"
      name: "bazel-rc"
      readOnly: true
    - mountPath: "/etc/containerinfo"
      name: "containerinfo"
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/network-multitool"
    name: "net-tool"
    resources:
      limits:
        memory: "128Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - image: "hub.pingcap.net/jenkins/python3-requests:latest"
    name: "report"
    resources:
      limits:
        memory: "256Mi"
        cpu: "100m"
    tty: true
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  - env:
    - name: "JENKINS_SECRET"
      value: "********"
    - name: "JENKINS_TUNNEL"
      value: "jenkins-agent.apps.svc.cluster.local:50000"
    - name: "JENKINS_AGENT_NAME"
      value: "pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125"
    - name: "JENKINS_NAME"
      value: "pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125"
    - name: "JENKINS_AGENT_WORKDIR"
      value: "/home/jenkins/agent"
    - name: "JENKINS_URL"
      value: "http://jenkins.apps.svc.cluster.local:8080/jenkins/"
    image: "jenkins/inbound-agent:3206.vb_15dcf73f6a_9-2"
    name: "jnlp"
    resources:
      requests:
        memory: "256Mi"
        cpu: "100m"
    volumeMounts:
    - mountPath: "/home/jenkins/agent"
      name: "workspace-volume"
      readOnly: false
  restartPolicy: "Never"
  securityContext:
    fsGroup: 1000
  volumes:
  - emptyDir: {}
    name: "bazel-out-merged"
  - name: "bazel-rc"
    secret:
      secretName: "bazel"
  - emptyDir: {}
    name: "bazel-out-overlay"
  - name: "gocache"
    persistentVolumeClaim:
      claimName: "gocache"
  - downwardAPI:
      items:
      - path: "cpu_limit"
        resourceFieldRef:
          containerName: "golang"
          resource: "limits.cpu"
      - path: "cpu_request"
        resourceFieldRef:
          containerName: "golang"
          resource: "requests.cpu"
      - path: "mem_limit"
        resourceFieldRef:
          containerName: "golang"
          resource: "limits.memory"
      - path: "mem_request"
        resourceFieldRef:
          containerName: "golang"
          resource: "requests.memory"
    name: "containerinfo"
  - name: "bazel-out-lower"
    persistentVolumeClaim:
      claimName: "bazel-out-data"
  - emptyDir:
      medium: ""
    name: "workspace-volume"
  - name: "gopathcache"
    persistentVolumeClaim:
      claimName: "gopathcache"
  - name: "bazel-repository-cache"
    persistentVolumeClaim:
      claimName: "bazel-repository-cache"
go1.22.3","link":"https://github.com/pingcap/tidb/pull/51126","commit_link":"https://github.com/pingcap/tidb/pull/51126/commits/05a77e6fe44ca1328a99962dc80b9bd2ae2c3bfe","author_link":"https://github.com/hawkingrei"}]}} KUBERNETES_SERVICE_HOST=10.233.0.1 WORKSPACE=/home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2 JOB_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/ RUN_CHANGES_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/10517/display/redirect?page=changes RUN_ARTIFACTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/10517/display/redirect?page=artifacts FILE_SERVER_URL=http://fileserver.pingcap.net JENKINS_HOME=/var/jenkins_home GIT_COMMIT=e64357d6ddd9be700fd5c8d87859c826b78da53a PATH=/go/bin:/usr/local/go/bin:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin RUN_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/10517/display/redirect GOPROXY=http://goproxy.apps.svc,https://proxy.golang.org,direct _=/usr/bin/printenv POD_CONTAINER=golang PWD=/home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2 HUDSON_URL=https://do.pingcap.net/jenkins/ JOB_NAME=pingcap/tidb/ghpr_check2 TZ=Asia/Shanghai BUILD_DISPLAY_NAME=#10517 JENKINS_URL=https://do.pingcap.net/jenkins/ BUILD_ID=1796721775883587584 GOLANG_DOWNLOAD_SHA256=d0398903a16ba2232b389fb31032ddf57cac34efda306a0eebac34f0965a0742 JOB_BASE_NAME=ghpr_check2 GIT_PREVIOUS_SUCCESSFUL_COMMIT=e64357d6ddd9be700fd5c8d87859c826b78da53a RUN_TESTS_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/10517/display/redirect?page=tests SHLVL=3 HOME=/home/jenkins POD_LABEL=pingcap_tidb_ghpr_check2_10517-6d999 GOROOT=/usr/local/go GIT_BRANCH=origin/main KUBERNETES_PORT_443_TCP_PROTO=tcp CI=true KUBERNETES_SERVICE_PORT_HTTPS=443 WORKSPACE_TMP=/home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2@tmp EXECUTOR_NUMBER=0 JENKINS_SERVER_COOKIE=durable-da268d9e668a40ac9bdc52566cc558810a4c43d7bbe3d53ea4e01dfdf6821af4 NODE_LABELS=pingcap_tidb_ghpr_check2_10517-6d999 pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125 GIT_URL=https://github.com/PingCAP-QE/ci.git HUDSON_HOME=/var/jenkins_home CLASSPATH= NODE_NAME=pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125 GOPATH=/go JOB_DISPLAY_URL=https://do.pingcap.net/jenkins/job/pingcap/job/tidb/job/ghpr_check2/display/redirect BUILD_NUMBER=10517 KUBERNETES_PORT_443_TCP_ADDR=10.233.0.1 KUBERNETES_PORT_443_TCP=tcp://10.233.0.1:443 GOLANG_DOWNLOAD_URL=https://dl.google.com/go/go1.21.0.linux-amd64.tar.gz + echo ------------------------- ------------------------- + go env GO111MODULE='' GOARCH='amd64' GOBIN='' GOCACHE='/home/jenkins/.cache/go-build' GOENV='/home/jenkins/.config/go/env' GOEXE='' GOEXPERIMENT='' GOFLAGS='' GOHOSTARCH='amd64' GOHOSTOS='linux' GOINSECURE='' GOMODCACHE='/go/pkg/mod' GONOPROXY='' GONOSUMDB='' GOOS='linux' GOPATH='/go' GOPRIVATE='' GOPROXY='http://goproxy.apps.svc,https://proxy.golang.org,direct' GOROOT='/usr/local/go' GOSUMDB='sum.golang.org' GOTMPDIR='' GOTOOLCHAIN='auto' GOTOOLDIR='/usr/local/go/pkg/tool/linux_amd64' GOVCS='' GOVERSION='go1.21.0' GCCGO='gccgo' GOAMD64='v1' AR='ar' CC='gcc' CXX='g++' CGO_ENABLED='1' GOMOD='/dev/null' GOWORK='' CGO_CFLAGS='-O2 -g' CGO_CPPFLAGS='' CGO_CXXFLAGS='-O2 -g' CGO_FFLAGS='-O2 -g' CGO_LDFLAGS='-O2 -g' PKG_CONFIG='pkg-config' GOGCCFLAGS='-fPIC -m64 -pthread -Wl,--no-gc-sections -fmessage-length=0 -fdebug-prefix-map=/tmp/go-build4267444080=/tmp/go-build -gno-record-gcc-switches' + echo ------------------------- 
------------------------- + echo 'debug command: kubectl -n jenkins-tidb exec -ti pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125 bash' debug command: kubectl -n jenkins-tidb exec -ti pingcap-tidb-ghpr-check2-10517-6d999-9w2bt-xg125 bash [Pipeline] container [Pipeline] { [Pipeline] sh + dig github.com ; <<>> DiG 9.18.16 <<>> github.com ;; global options: +cmd ;; Got answer: ;; ->>HEADER<<- opcode: QUERY, status: NOERROR, id: 42522 ;; flags: qr aa rd ra; QUERY: 1, ANSWER: 1, AUTHORITY: 0, ADDITIONAL: 1 ;; OPT PSEUDOSECTION: ; EDNS: version: 0, flags:; udp: 1232 ; COOKIE: aff780aa3759c415 (echoed) ;; QUESTION SECTION: ;github.com. IN A ;; ANSWER SECTION: github.com. 26 IN A 20.205.243.166 ;; Query time: 0 msec ;; SERVER: 169.254.25.10#53(169.254.25.10) (UDP) ;; WHEN: Sat Jun 01 01:54:14 UTC 2024 ;; MSG SIZE rcvd: 77 [Pipeline] script [Pipeline] { [Pipeline] } [Pipeline] // script [Pipeline] } [Pipeline] // container [Pipeline] } [Pipeline] // stage [Pipeline] stage [Pipeline] { (Checkout) [Pipeline] dir Running in /home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2/tidb [Pipeline] { [Pipeline] cache Cache restored successfully (git/pingcap/tidb/rev-4670bf5) 586156032 bytes in 1.91 secs (307166106 bytes/sec) [Pipeline] { [Pipeline] retry [Pipeline] { [Pipeline] script [Pipeline] { [Pipeline] sh git version 2.37.2 Reinitialized existing Git repository in /home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2/tidb/.git/ .git HEAD is now at 4670bf577a ddl: separate `sessionctx.Context` in `backfillCtx` to sub-contexts (#53671) POST git-upload-pack (656 bytes) POST git-upload-pack (973 bytes) From https://github.com/pingcap/tidb = [up to date] master -> origin/master + 0f12152e9b...05a77e6fe4 refs/pull/51126/head -> origin/pr/51126/head (forced update) HEAD is now at 4670bf577a ddl: separate `sessionctx.Context` in `backfillCtx` to sub-contexts (#53671) 🚧 Checkouting to base SHA:4670bf577a54a631d2e6179eb05c29c536b8b08f... HEAD is now at 4670bf577a ddl: separate `sessionctx.Context` in `backfillCtx` to sub-contexts (#53671) ✅ Checked. 🎉 🧾 HEAD info: 4670bf577a54a631d2e6179eb05c29c536b8b08f 4670bf577a ddl: separate `sessionctx.Context` in `backfillCtx` to sub-contexts (#53671) 3fdb963a33 domain: do not reset the data for infoschema v2 when full load (#53543) 90e1049b67 ddl: remove useless fields in `CopContextBase` (#53721) 🚧 Pre-merge heads of pull requests to base SHA: 4670bf577a54a631d2e6179eb05c29c536b8b08f ... 
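The Checkout stage above restores a cached tidb repository, pins HEAD to the base SHA from JOB_SPEC, and pre-merges the PR head (refs/pull/51126/head) onto it; the Fast-forward diffstat above shows the result. A rough sketch of the equivalent git commands, where the merge invocation is an assumption and the refs and SHAs come from the log:

  # minimal sketch of the pre-merge step, assuming a plain merge of the PR head onto the pinned base
  cd /home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2/tidb
  git fetch origin master "+refs/pull/51126/head:refs/remotes/origin/pr/51126/head"
  git checkout -f 4670bf577a54a631d2e6179eb05c29c536b8b08f     # base_sha from JOB_SPEC
  git merge --no-edit 05a77e6fe44ca1328a99962dc80b9bd2ae2c3bfe # PR head commit; fast-forwards here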
[Pipeline] stage
[Pipeline] { (Prepare)
[Pipeline] dir
Running in /home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2/tidb
[Pipeline] {
[Pipeline] cache
Cache not restored (no such key found)
[Pipeline] {
[Pipeline] sh
+ ls bin/tidb-server
ls: cannot access bin/tidb-server: No such file or directory
+ make server
go: downloading go1.22 (linux/amd64)
go: download go1.22 for linux/amd64: toolchain not available
go: downloading go1.22 (linux/amd64)
go: download go1.22 for linux/amd64: toolchain not available
CGO_ENABLED=1 GO111MODULE=on go build -tags codes -ldflags '-X "github.com/pingcap/tidb/pkg/parser/mysql.TiDBReleaseVersion=v8.2.0-alpha-281-g05a77e6fe4" -X "github.com/pingcap/tidb/pkg/util/versioninfo.TiDBBuildTS=2024-06-01 01:54:36" -X "github.com/pingcap/tidb/pkg/util/versioninfo.TiDBGitHash=05a77e6fe44ca1328a99962dc80b9bd2ae2c3bfe" -X "github.com/pingcap/tidb/pkg/util/versioninfo.TiDBGitBranch=HEAD" -X "github.com/pingcap/tidb/pkg/util/versioninfo.TiDBEdition=Community" ' -o bin/tidb-server ./cmd/tidb-server
go: downloading go1.22 (linux/amd64)
go: download go1.22 for linux/amd64: toolchain not available
make: *** [server] Error 1
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // cache
[Pipeline] }
[Pipeline] // dir
[Pipeline] }
[Pipeline] // stage
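This is the root cause of the failure: the pre-merged branch appears to raise the Go requirement to 1.22 (the PR is titled "*: upgrade go1.22.3" and touches go.mod and WORKSPACE), while the builder image still runs go1.21.0 with GOTOOLCHAIN='auto' (see the go env output above). go1.21 therefore tries to download a go1.22 toolchain through GOPROXY (http://goproxy.apps.svc, then proxy.golang.org), that download fails, and make server aborts with "toolchain not available". A short sketch for confirming this inside the golang container; the go.mod contents and proxy behaviour are assumptions, the versions and proxy list come from the log:

  cd /home/jenkins/agent/workspace/pingcap/tidb/ghpr_check2/tidb
  grep -E '^(go|toolchain) ' go.mod      # expected to show a 1.22 requirement after the pre-merge
  go env GOVERSION GOTOOLCHAIN GOPROXY   # go1.21.0 / auto / http://goproxy.apps.svc,...
  # force the same toolchain download that make triggers, outside of the build:
  GOTOOLCHAIN=go1.22 go version || echo 'go1.22 toolchain cannot be fetched through the configured proxies'
  # possible fixes (not taken from this log): rebuild the image on a go1.22 base, or point GOPROXY
  # at a mirror that serves golang.org/toolchain modules.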
[Pipeline] stage
[Pipeline] { (Checks)
Stage "Checks" skipped due to earlier failure(s)
[Pipeline] parallel
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh y')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh n')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_brietest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pessimistictest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_sessiontest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_statisticstest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_txntest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest1')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest2')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest3')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest4')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest2')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest3')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest4')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pipelineddmltest')
[Pipeline] { (Branch: Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_flashbacktest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh y')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh n')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_brietest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pessimistictest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_sessiontest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_statisticstest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_txntest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest1')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest2')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest3')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest4')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest2')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest3')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest4')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pipelineddmltest')
[Pipeline] stage [Pipeline] { (Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_flashbacktest')
Stage "Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh y'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh n'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_brietest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pessimistictest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_sessiontest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_statisticstest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_txntest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest1'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest2'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest3'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest4'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest2'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest3'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest4'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pipelineddmltest'" skipped due to earlier failure(s)
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_flashbacktest'" skipped due to earlier failure(s)
[Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test) [Pipeline] stage [Pipeline] { (Test)
Stage "Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh y'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh n'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_brietest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pessimistictest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_sessiontest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_statisticstest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_txntest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest1'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest2'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest3'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest4'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest2'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest3'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest4'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pipelineddmltest'" skipped due to earlier failure(s) [Pipeline] }
Stage "Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_flashbacktest'" skipped due to earlier failure(s) [Pipeline] }
[Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage
[Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] } [Pipeline] }
[Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage [Pipeline] // stage
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh y'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'integrationtest_with_tikv.sh n'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_brietest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pessimistictest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_sessiontest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_statisticstest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_txntest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest1'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest2'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest3'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_addindextest4'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest2'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest3'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_importintotest4'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_pipelineddmltest'
[Pipeline] } Failed in branch Matrix - SCRIPT_AND_ARGS = 'run_real_tikv_tests.sh bazel_flashbacktest'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] stage
[Pipeline] { (Declarative: Post Actions)
[Pipeline] container
[Pipeline] {
[Pipeline] sh
+ bash scripts/plugins/report_job_result.sh FAILURE result.json http://fileserver.pingcap.net
--2024-06-01 01:54:43--  http://fileserver.pingcap.net/download/rd-atom-agent/agent_upload_verifyci_metadata.py
Resolving fileserver.pingcap.net (fileserver.pingcap.net)... 10.2.12.82
Connecting to fileserver.pingcap.net (fileserver.pingcap.net)|10.2.12.82|:80... connected.
HTTP request sent, awaiting response... 200 OK
Length: 4181 (4.1K) [application/octet-stream]
Saving to: ‘agent_upload_verifyci_metadata.py’

     0K ....                                                  100%  695M=0s

2024-06-01 01:54:43 (695 MB/s) - ‘agent_upload_verifyci_metadata.py’ saved [4181/4181]

No junit report file
parse result file result.json success
upload data successfully.
[Pipeline] }
[Pipeline] // container
[Pipeline] archiveArtifacts
Archiving artifacts
Recording fingerprints
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 2
Finished: FAILURE
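For reference, the post-action report step is driven by scripts/plugins/report_job_result.sh, which the transcript shows downloading agent_upload_verifyci_metadata.py from the file server, parsing result.json, and uploading the build metadata. A rough sketch of the equivalent manual steps; the python invocation and its argument order are assumptions, only the URL, file names, and FAILURE status come from the log:

  # hedged sketch of what the report helper appears to do
  wget http://fileserver.pingcap.net/download/rd-atom-agent/agent_upload_verifyci_metadata.py
  python3 agent_upload_verifyci_metadata.py result.json FAILURE   # hypothetical argument order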