Console Output

	{"id":"1cb9fde1-3d63-4dad-b145-13f0ea648787","address":"127.0.0.1:8302","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/bc9a03aa-c58e-4cdc-bcbb-eea49c872083
	{"id":"bc9a03aa-c58e-4cdc-bcbb-eea49c872083","address":"127.0.0.1:8301","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca42
	bc9a03aa-c58e-4cdc-bcbb-eea49c872083

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca69
	1cb9fde1-3d63-4dad-b145-13f0ea648787

/tidb/cdc/default/default/upstream/7363211179252762937
	{"id":7363211179252762937,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1cb9fde1-3d63-4dad-b145-13f0ea648787
	{"id":"1cb9fde1-3d63-4dad-b145-13f0ea648787","address":"127.0.0.1:8302","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/bc9a03aa-c58e-4cdc-bcbb-eea49c872083
	{"id":"bc9a03aa-c58e-4cdc-bcbb-eea49c872083","address":"127.0.0.1:8301","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca42
	bc9a03aa-c58e-4cdc-bcbb-eea49c872083

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca69
	1cb9fde1-3d63-4dad-b145-13f0ea648787

/tidb/cdc/default/default/upstream/7363211179252762937
	{"id":7363211179252762937,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
[Mon Apr 29 17:00:19 CST 2024] <<<<<< START cdc server in multi_capture case >>>>>>
+ [[ '' == \t\r\u\e ]]
+ set +e
+ get_info_fail_msg='failed to get info:'
+ etcd_info_msg='etcd info'
+ '[' -z '' ']'
+ curl_status_cmd='curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info'
+ [[ no != \n\o ]]
+ GO_FAILPOINTS=
+ (( i = 0 ))
+ (( i <= 50 ))
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.83948396.out server --log-file /tmp/tidb_cdc_test/multi_capture/cdc3.log --log-level debug --data-dir /tmp/tidb_cdc_test/multi_capture/cdc_data3 --cluster-id default --addr 127.0.0.1:8303
++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info
* About to connect() to 127.0.0.1 port 8303 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8303; Connection refused
* Closing connection 0
+ res=
+ echo ''
+ grep -q 'failed to get info:'
+ echo ''
+ grep -q 'etcd info'
+ '[' 0 -eq 50 ']'
+ sleep 3
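The trace above is the CDC server readiness probe from the test harness: it polls the debug endpoint until the response contains 'etcd info', sleeping 3 seconds between attempts and giving up after 50 tries. A minimal standalone sketch of that pattern, using only the endpoint, messages, and retry count visible in the trace (the function name and structure are illustrative assumptions, not the harness's actual helper):

    # Illustrative readiness probe reconstructed from the trace above.
    # Endpoint, grep patterns, retry count, and sleep come from the log; the rest is an assumption.
    check_cdc_ready() {
        local addr=${1:-127.0.0.1:8303}
        for i in $(seq 0 50); do
            res=$(curl -vsL --max-time 20 "http://${addr}/debug/info")
            # An explicit error message means the server is up but unhealthy; keep retrying.
            if echo "$res" | grep -q 'failed to get info:'; then
                sleep 3
                continue
            fi
            # A response containing the etcd metadata dump means the server is ready.
            echo "$res" | grep -q 'etcd info' && return 0
            sleep 3
        done
        return 1
    }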
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   7.5.2
Edition:           Community
Git Commit Hash:   3478895c2a700e4824bb41940260b6b28013275e
Git Commit Branch: release-7.5
UTC Build Time:    2024-04-28 08:20:54
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Enable Features:   pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   7.5.2
Edition:           Community
Git Commit Hash:   3478895c2a700e4824bb41940260b6b28013275e
Git Commit Branch: release-7.5
UTC Build Time:    2024-04-28 08:20:54
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Enable Features:   pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Profile:           dist_release
check diff failed 1-th time, retry later
go: downloading github.com/google/s2a-go v0.1.7
go: downloading go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.47.0
go: downloading go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.47.0
go: downloading github.com/googleapis/enterprise-certificate-proxy v0.3.2
go: downloading github.com/felixge/httpsnoop v1.0.4
start tidb cluster in /tmp/tidb_cdc_test/owner_remove_table_error
Starting Upstream PD...
Release Version: v7.5.1-5-g584533652
Edition: Community
Git Commit Hash: 58453365285465cd90bc4472cff2bad7ce4d764b
Git Branch: release-7.5
UTC Build Time:  2024-04-03 10:04:14
Starting Downstream PD...
Release Version: v7.5.1-5-g584533652
Edition: Community
Git Commit Hash: 58453365285465cd90bc4472cff2bad7ce4d764b
Git Branch: release-7.5
UTC Build Time:  2024-04-03 10:04:14
Verifying upstream PD is started...
go: downloading github.com/jmespath/go-jmespath v0.4.0
table processor_delay.t22 exists
table processor_delay.t23 not exists for 1-th check, retry later
nonempty select id, val from test.availability1 where id=1 and val=1
run task successfully
nonempty select id, val from test.availability1 where id=1 and val=22
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id:'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
check diff successfully
go: downloading github.com/modern-go/reflect2 v1.0.2
go: downloading github.com/json-iterator/go v1.1.12
go: downloading github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd
Starting Upstream TiDB...
Release Version: v7.5.1-45-gbf84e231e6
Edition: Community
Git Commit Hash: bf84e231e6ef26891d0cb524d938345f43aa047c
Git Branch: release-7.5
UTC Build Time: 2024-04-29 02:05:15
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v7.5.1-45-gbf84e231e6
Edition: Community
Git Commit Hash: bf84e231e6ef26891d0cb524d938345f43aa047c
Git Branch: release-7.5
UTC Build Time: 2024-04-29 02:05:15
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 2-th time, retry later
+ (( i++ ))
+ (( i <= 50 ))
++ curl -vsL --max-time 20 http://127.0.0.1:8303/debug/info
* About to connect() to 127.0.0.1 port 8303 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8303 (#0)
> GET /debug/info HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8303
> Accept: */*
> 
< HTTP/1.1 200 OK
< Date: Mon, 29 Apr 2024 09:00:22 GMT
< Content-Length: 1144
< Content-Type: text/plain; charset=utf-8
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
+ res='

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1cb9fde1-3d63-4dad-b145-13f0ea648787
	{"id":"1cb9fde1-3d63-4dad-b145-13f0ea648787","address":"127.0.0.1:8302","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/92e21232-2dcb-464a-b7eb-6b82d0c137ce
	{"id":"92e21232-2dcb-464a-b7eb-6b82d0c137ce","address":"127.0.0.1:8303","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/bc9a03aa-c58e-4cdc-bcbb-eea49c872083
	{"id":"bc9a03aa-c58e-4cdc-bcbb-eea49c872083","address":"127.0.0.1:8301","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca42
	bc9a03aa-c58e-4cdc-bcbb-eea49c872083

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca69
	1cb9fde1-3d63-4dad-b145-13f0ea648787

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca78
	92e21232-2dcb-464a-b7eb-6b82d0c137ce

/tidb/cdc/default/default/upstream/7363211179252762937
	{"id":7363211179252762937,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1cb9fde1-3d63-4dad-b145-13f0ea648787
	{"id":"1cb9fde1-3d63-4dad-b145-13f0ea648787","address":"127.0.0.1:8302","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/92e21232-2dcb-464a-b7eb-6b82d0c137ce
	{"id":"92e21232-2dcb-464a-b7eb-6b82d0c137ce","address":"127.0.0.1:8303","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/bc9a03aa-c58e-4cdc-bcbb-eea49c872083
	{"id":"bc9a03aa-c58e-4cdc-bcbb-eea49c872083","address":"127.0.0.1:8301","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca42
	bc9a03aa-c58e-4cdc-bcbb-eea49c872083

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca69
	1cb9fde1-3d63-4dad-b145-13f0ea648787

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca78
	92e21232-2dcb-464a-b7eb-6b82d0c137ce

/tidb/cdc/default/default/upstream/7363211179252762937
	{"id":7363211179252762937,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'failed to get info:'
+ echo '

*** processors info ***:



*** etcd info ***:

/tidb/cdc/default/__cdc_meta__/capture/1cb9fde1-3d63-4dad-b145-13f0ea648787
	{"id":"1cb9fde1-3d63-4dad-b145-13f0ea648787","address":"127.0.0.1:8302","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/92e21232-2dcb-464a-b7eb-6b82d0c137ce
	{"id":"92e21232-2dcb-464a-b7eb-6b82d0c137ce","address":"127.0.0.1:8303","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/capture/bc9a03aa-c58e-4cdc-bcbb-eea49c872083
	{"id":"bc9a03aa-c58e-4cdc-bcbb-eea49c872083","address":"127.0.0.1:8301","version":"v7.5.1-21-g88db1a842"}

/tidb/cdc/default/__cdc_meta__/meta/meta-version
	1

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca42
	bc9a03aa-c58e-4cdc-bcbb-eea49c872083

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca69
	1cb9fde1-3d63-4dad-b145-13f0ea648787

/tidb/cdc/default/__cdc_meta__/owner/22318f29150aca78
	92e21232-2dcb-464a-b7eb-6b82d0c137ce

/tidb/cdc/default/default/upstream/7363211179252762937
	{"id":7363211179252762937,"pd-endpoints":"http://127.0.0.1:2379,http://127.0.0.1:2379","key-path":"","cert-path":"","ca-path":"","cert-allowed-cn":null}'
+ grep -q 'etcd info'
+ break
+ set +x
+ cdc.test -test.coverprofile=/tmp/tidb_cdc_test/cov.multi_capture.cli.8439.out cli changefeed create --start-ts=449414748093284353 --sink-uri=mysql://normal:123456@127.0.0.1:3306/ --server=127.0.0.1:8301
table processor_delay.t23 exists
table processor_delay.t24 not exists for 1-th check, retry later
Resume changefeed with checkpointTs3 449414751096668163
Create changefeed successfully!
ID: 21858f57-b055-4923-b316-f645428e180b
Info: {"upstream_id":7363211179252762937,"namespace":"default","id":"21858f57-b055-4923-b316-f645428e180b","sink_uri":"mysql://normal:xxxxx@127.0.0.1:3306/","create_time":"2024-04-29T17:00:22.503849177+08:00","start_ts":449414748093284353,"config":{"memory_quota":1073741824,"case_sensitive":false,"force_replicate":false,"ignore_ineligible_table":false,"check_gc_safe_point":true,"enable_sync_point":false,"bdr_mode":false,"sync_point_interval":600000000000,"sync_point_retention":86400000000000,"filter":{"rules":["*.*"]},"mounter":{"worker_num":16},"sink":{"csv":{"delimiter":",","quote":"\"","null":"\\N","include_commit_ts":false,"binary_encoding_method":"base64"},"encoder_concurrency":32,"terminator":"\r\n","date_separator":"day","enable_partition_separator":true,"enable_kafka_sink_v2":false,"only_output_updated_columns":false,"delete_only_output_handle_key_columns":false,"advance_timeout":150,"send_bootstrap_interval_in_sec":120,"send_bootstrap_in_msg_count":10000,"send_bootstrap_to_all_partition":true},"consistent":{"level":"none","max_log_size":64,"flush_interval":2000,"meta_flush_interval":200,"encoding_worker_num":16,"flush_worker_num":8,"use_file_backend":false,"memory_usage":{"memory_quota_percentage":50,"event_cache_percentage":0}},"scheduler":{"enable_table_across_nodes":false,"region_threshold":100000,"write_key_threshold":0},"integrity":{"integrity_check_level":"none","corruption_handle_level":"warn"},"changefeed_error_stuck_duration":1800000000000,"sql_mode":"ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION","synced_status":{"synced_check_interval":300,"checkpoint_interval":15}},"state":"normal","creator_version":"v7.5.1-21-g88db1a842","resolved_ts":449414748093284353,"checkpoint_ts":449414748093284353,"checkpoint_time":"2024-04-29 17:00:10.683"}
PASS
coverage: 2.4% of statements in github.com/pingcap/tiflow/...
table test.table5 not exists for 1-th check, retry later
{"level":"warn","ts":1714381212.1282222,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00248d500/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)

{"level":"warn","ts":1714381214.1268082,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00248d500/127.0.0.1:2379","attempt":1,"error":"rpc error: code = DeadlineExceeded desc = latest balancer error: last connection error: connection error: desc = \"transport: Error while dialing: dial tcp 127.0.0.1:2379: connect: connection refused\""}
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_cdc_integration_mysql_test/tiflow/tests/integration_tests/consistent_replicate_ddl/run.sh: line 14:  2986 Terminated              sleep 120
script returned exit code 143
nonempty select id, val from test.availability1 where id=1 and val=22
run task successfully
empty select id, val from test.availability1 where id=1
TEST FAILED: OUTPUT CONTAINS 'id:'
____________________________________
*************************** 1. row ***************************
 id: 1
val: 22
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
cdc cli capture list --server http://127.0.0.1:8302 |jq '.|length'|grep -E '^1$'
1
run task successfully
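The check just above counts registered captures by piping `cdc cli capture list` through jq and matching the expected count. A hedged sketch of the same pipeline wrapped as a reusable check (only the cdc/jq/grep one-liner is taken from the log; the wrapper function and its parameters are illustrative):

    # Illustrative wrapper around the capture-count check seen in the log.
    check_capture_count() {
        local server=$1 expected=$2
        cdc cli capture list --server "http://${server}" | jq '.|length' | grep -qE "^${expected}$"
    }
    # e.g. check_capture_count 127.0.0.1:8302 1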
check diff failed 1-th time, retry later
+ set +x
table multi_capture_1.usertable not exists for 1-th check, retry later
table processor_delay.t24 exists
table processor_delay.t25 not exists for 1-th check, retry later
Verifying downstream PD is started...
Starting Upstream TiKV...
TiKV 
Release Version:   7.5.2
Edition:           Community
Git Commit Hash:   3478895c2a700e4824bb41940260b6b28013275e
Git Commit Branch: release-7.5
UTC Build Time:    2024-04-28 08:20:54
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Enable Features:   pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Profile:           dist_release
Starting Downstream TiKV...
TiKV 
Release Version:   7.5.2
Edition:           Community
Git Commit Hash:   3478895c2a700e4824bb41940260b6b28013275e
Git Commit Branch: release-7.5
UTC Build Time:    2024-04-28 08:20:54
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Enable Features:   pprof-fp jemalloc mem-profiling portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Profile:           dist_release
check diff failed 3-th time, retry later
table test.table5 not exists for 2-th check, retry later
empty select id, val from test.availability1 where id=1
run task successfully
nonempty select id, val from test.availability2 where id=1 and val=1
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id:'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
check diff successfully
wait process cdc.test exit for 1-th time...
table processor_delay.t25 exists
Starting Upstream TiDB...
Release Version: v7.5.1-45-gbf84e231e6
Edition: Community
Git Commit Hash: bf84e231e6ef26891d0cb524d938345f43aa047c
Git Branch: release-7.5
UTC Build Time: 2024-04-29 02:05:15
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Starting Downstream TiDB...
Release Version: v7.5.1-45-gbf84e231e6
Edition: Community
Git Commit Hash: bf84e231e6ef26891d0cb524d938345f43aa047c
Git Branch: release-7.5
UTC Build Time: 2024-04-29 02:05:15
GoVersion: go1.21.6
Race Enabled: false
Check Table Before Drop: false
Store: unistore
Verifying Upstream TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff failed 4-th time, retry later
table processor_delay.t26 not exists for 1-th check, retry later
wait process cdc.test exit for 2-th time...
table multi_capture_1.usertable exists
table multi_capture_2.usertable not exists for 1-th check, retry later
cdc.test: no process found
wait process cdc.test exit for 3-th time...
process cdc.test already exit
[Mon Apr 29 17:00:26 CST 2024] <<<<<< run test case processor_resolved_ts_fallback success! >>>>>>
nonempty select id, val from test.availability2 where id=1 and val=1
run task successfully
nonempty select id, val from test.availability2 where id=1 and val=22
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id:'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
table test.table5 exists
table test.table6 not exists for 1-th check, retry later
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
check diff failed 5-th time, retry later
table processor_delay.t26 exists
table processor_delay.t27 not exists for 1-th check, retry later
table multi_capture_2.usertable exists
table multi_capture_3.usertable not exists for 1-th check, retry later
{"level":"warn","ts":1714381228.1082995,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0024d6e00/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
table test.table6 exists
check diff failed 1-th time, retry later
nonempty select id, val from test.availability2 where id=1 and val=22
run task successfully
empty select id, val from test.availability2 where id=1
TEST FAILED: OUTPUT CONTAINS 'id:'
____________________________________
*************************** 1. row ***************************
 id: 1
val: 22
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca4560e2c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-9fpp5, pid:27046, start at 2024-04-29 17:00:26.918319236 +0800 CST m=+5.290184141	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:26.926 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:26.891 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:26.891 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca4560e2c0013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-9fpp5, pid:27046, start at 2024-04-29 17:00:26.918319236 +0800 CST m=+5.290184141	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:26.926 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:26.891 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:26.891 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca4560ed00013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-9fpp5, pid:27117, start at 2024-04-29 17:00:26.960697118 +0800 CST m=+5.271027929	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:26.967 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:26.932 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:26.932 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v7.5.1-12-g9002cc34d
Edition:         Community
Git Commit Hash: 9002cc34d3b593a718b6c5260ba18f30a45ab314
Git Branch:      HEAD
UTC Build Time:  2024-04-18 07:24:48
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO

Raft Proxy
Git Commit Hash:   521fd9dbc55e58646045d88f91c3c35db50b5981
Git Commit Branch: HEAD
UTC Build Time:    2024-04-18 07:28:40
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:    portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/autorandom/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/autorandom/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash-proxy.toml"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/log/proxy.log"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/autorandom/tiflash/db/proxy"] }, "engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["9002cc34d3b593a718b6c5260ba18f30a45ab314"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v7.5.1-12-g9002cc34d"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
table processor_delay.t27 exists
table processor_delay.t28 not exists for 1-th check, retry later
table multi_capture_3.usertable exists
{"level":"warn","ts":1714381230.082542,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0008748c0/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: read tcp 127.0.0.1:58880->127.0.0.1:2379: read: connection reset by peer"}
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
check diff successfully
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
check diff successfully
kill finished with exit code 0
Killing processes
{"level":"warn","ts":1714381231.3025887,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0021c4000/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
Resume changefeed with checkpointTs1 449414749196124167
empty select id, val from test.availability2 where id=1
run task successfully
nonempty select id, val from test.availability3 where id=1 and val=1
TEST FAILED: OUTPUT DOES NOT CONTAIN 'id:'
____________________________________
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
script returned exit code 143
table test.table3 not exists for 1-th check, retry later
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
table processor_delay.t28 exists
table processor_delay.t29 not exists for 1-th check, retry later
nonempty select id, val from test.availability3 where id=1 and val=1
run task successfully
nonempty select id, val from test.availability3 where id=1 and val=22
kill finished with exit code 1
Sending interrupt signal to process
Killing processes
table test.table3 not exists for 2-th check, retry later
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca45652d00006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-z7zg2, pid:9763, start at 2024-04-29 17:00:31.290958038 +0800 CST m=+5.428338745	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:31.298 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:31.284 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:31.284 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca45652d00006	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-z7zg2, pid:9763, start at 2024-04-29 17:00:31.290958038 +0800 CST m=+5.428338745	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:31.298 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:31.284 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:31.284 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Verifying Downstream TiDB is started...
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
tikv_gc_leader_uuid	63ca45652a00013	Current GC worker leader UUID. (DO NOT EDIT)
tikv_gc_leader_desc	host:ap-tiflow-release-7-5-pull-cdc-integration-mysql-test-336-z7zg2, pid:9836, start at 2024-04-29 17:00:31.299855932 +0800 CST m=+5.365780692	Host name and pid of current GC leader. (DO NOT EDIT)
tikv_gc_leader_lease	20240429-17:02:31.307 +0800	Current GC worker leader lease. (DO NOT EDIT)
tikv_gc_auto_concurrency	true	Let TiDB pick the concurrency automatically. If set false, tikv_gc_concurrency will be used
tikv_gc_enable	true	Current GC enable status
tikv_gc_run_interval	10m0s	GC run interval, at least 10m, in Go format.
tikv_gc_life_time	10m0s	All versions within life time will not be collected by GC, at least 10m, in Go format.
tikv_gc_last_run_time	20240429-17:00:31.272 +0800	The time when last GC starts. (DO NOT EDIT)
tikv_gc_safe_point	20240429-16:50:31.272 +0800	All versions after safe point can be accessed. (DO NOT EDIT)
Starting Upstream TiFlash...
TiFlash
Release Version: v7.5.1-12-g9002cc34d
Edition:         Community
Git Commit Hash: 9002cc34d3b593a718b6c5260ba18f30a45ab314
Git Branch:      HEAD
UTC Build Time:  2024-04-18 07:24:48
Enable Features: jemalloc sm4(GmSSL) avx2 avx512 unwind thinlto
Profile:         RELWITHDEBINFO

Raft Proxy
Git Commit Hash:   521fd9dbc55e58646045d88f91c3c35db50b5981
Git Commit Branch: HEAD
UTC Build Time:    2024-04-18 07:28:40
Rust Version:      rustc 1.67.0-nightly (96ddd32c4 2022-11-14)
Storage Engine:    tiflash
Prometheus Prefix: tiflash_proxy_
Profile:           release
Enable Features:    portable sse test-engine-kv-rocksdb test-engine-raft-raft-engine cloud-aws cloud-gcp cloud-azure
Verifying Upstream TiFlash is started...
Logging trace to /tmp/tidb_cdc_test/owner_remove_table_error/tiflash/log/server.log
Logging errors to /tmp/tidb_cdc_test/owner_remove_table_error/tiflash/log/error.log
arg matches is ArgMatches { args: {"engine-git-hash": MatchedArg { occurs: 1, indices: [10], vals: ["9002cc34d3b593a718b6c5260ba18f30a45ab314"] }, "addr": MatchedArg { occurs: 1, indices: [20], vals: ["127.0.0.1:9000"] }, "pd-endpoints": MatchedArg { occurs: 1, indices: [16], vals: ["127.0.0.1:2379"] }, "log-file": MatchedArg { occurs: 1, indices: [18], vals: ["/tmp/tidb_cdc_test/owner_remove_table_error/tiflash/log/proxy.log"] }, "engine-addr": MatchedArg { occurs: 1, indices: [2], vals: ["127.0.0.1:9500"] }, "config": MatchedArg { occurs: 1, indices: [8], vals: ["/tmp/tidb_cdc_test/owner_remove_table_error/tiflash-proxy.toml"] }, "data-dir": MatchedArg { occurs: 1, indices: [6], vals: ["/tmp/tidb_cdc_test/owner_remove_table_error/tiflash/db/proxy"] }, "engine-label": MatchedArg { occurs: 1, indices: [14], vals: ["tiflash"] }, "advertise-addr": MatchedArg { occurs: 1, indices: [4], vals: ["127.0.0.1:9000"] }, "engine-version": MatchedArg { occurs: 1, indices: [12], vals: ["v7.5.1-12-g9002cc34d"] }}, subcommand: None, usage: Some("USAGE:\n    TiFlash Proxy [FLAGS] [OPTIONS] --engine-git-hash <engine-git-hash> --engine-label <engine-label> --engine-version <engine-version>") }
run task successfully
empty select id, val from test.availability3 where id=1
TEST FAILED: OUTPUT CONTAINS 'id:'
____________________________________
*************************** 1. row ***************************
 id: 1
val: 22
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
run task failed 1-th time, retry later
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
table processor_delay.t29 exists
table processor_delay.t30 not exists for 1-th check, retry later
{"level":"warn","ts":1714381234.405157,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc002408a80/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: read tcp 127.0.0.1:46188->127.0.0.1:2379: read: connection reset by peer"}
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
script returned exit code 143
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
{"level":"warn","ts":1714381234.9470356,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0022b0c40/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
kill finished with exit code 0
{"level":"warn","ts":1714381235.218442,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc00307a1c0/127.0.0.1:2479","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
[Pipeline] // podTemplate
script returned exit code 143
[Pipeline] }
{"level":"warn","ts":1714381235.488721,"caller":"v3@v3.5.10/retry_interceptor.go:62","msg":"retrying of unary invoker failed","target":"etcd-endpoints://0xc0013ec540/127.0.0.1:2379","attempt":0,"error":"rpc error: code = Unavailable desc = error reading from server: EOF"}
script returned exit code 143
[Pipeline] // withEnv
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
Cache not saved (ws/jenkins-pingcap-tiflow-release-7.5-pull_cdc_integration_mysql_test-336/tiflow-cdc already exists)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] }
Cache not saved (inner-step execution failed)
[Pipeline] // stage
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // cache
[Pipeline] // cache
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // dir
[Pipeline] // dir
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withCredentials
[Pipeline] // withCredentials
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // timeout
[Pipeline] // timeout
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // container
[Pipeline] // container
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // node
[Pipeline] // node
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] // podTemplate
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] }
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G00'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G02'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G04'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G05'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G07'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G08'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G09'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G10'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G11'
[Pipeline] }
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G16'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G17'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G18'
[Pipeline] // stage
[Pipeline] // stage
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G19'
[Pipeline] }
Failed in branch Matrix - TEST_GROUP = 'G21'
[Pipeline] // parallel
[Pipeline] }
[Pipeline] // stage
[Pipeline] }
[Pipeline] // timeout
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // container
[Pipeline] }
[Pipeline] // withEnv
[Pipeline] }
[Pipeline] // node
[Pipeline] }
[Pipeline] // podTemplate
[Pipeline] End of Pipeline
ERROR: script returned exit code 1
Finished: FAILURE