Console Output

Skipping 1,141 KB..
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'task2_target_table', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'task2_target_table', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-2'}})
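[Editor's note] The create_noshard_task_success response above bundles the precheck report (check_result) with the echoed task spec. Below is a minimal sketch of the call behind it, assuming the dm-master OpenAPI listens on 127.0.0.1:8261 and accepts POST /api/v1/tasks; the endpoint, port, and payload wrapper are inferred from this log, not a verified contract.

# Hypothetical sketch of the task-creation call behind "create_noshard_task_success".
# Endpoint path, port, and payload shape are assumptions inferred from the log.
import requests

DM_MASTER_API = "http://127.0.0.1:8261"  # assumed dm-master OpenAPI address

task = {
    "name": "test-2",
    "task_mode": "all",
    "meta_schema": "dm-meta",
    "enhance_online_schema_change": True,
    "on_duplicate": "error",
    "target_config": {"host": "127.0.0.1", "port": 4000, "user": "root", "password": ""},
    "table_migrate_rule": [
        {"source": {"source_name": "mysql-01", "schema": "openapi", "table": "*"},
         "target": {"schema": "openapi", "table": "task2_target_table"}},
        {"source": {"source_name": "mysql-02", "schema": "openapi", "table": "*"},
         "target": {"schema": "openapi", "table": "task2_target_table"}},
    ],
    "source_config": {"source_conf": [{"source_name": "mysql-01"},
                                      {"source_name": "mysql-02"}]},
}

resp = requests.post(f"{DM_MASTER_API}/api/v1/tasks", json={"task": task})
resp.raise_for_status()
body = resp.json()
print("create_noshard_task_success resp=", body)
# check_result may carry warnings (e.g. mysql_version) even when summary.passed is true.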
dmctl test cmd: "query-status test-2"
got=2 expected=2
dmctl test cmd: "query-status test-2"
got=2 expected=2
('get_task_list_with_status resp=', {u'total': 2, u'data': [{u'strict_optimistic_shard_mode': False, u'source_config': {u'incr_migrate_conf': {u'repl_batch': 100, u'repl_threads': 16}, u'full_migrate_conf': {u'data_dir': u'./dumped_data', u'export_threads': 4, u'import_threads': 16}, u'source_conf': [{u'source_name': u'mysql-02'}, {u'source_name': u'mysql-01'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'replace', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'task1_target_table', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'task1_target_table', u'schema': u'openapi'}}], u'task_mode': u'all', u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'status_list': [{u'source_name': u'mysql-01', u'name': u'test-1', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 0, u'master_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-19', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3891)', u'syncer_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-19', u'synced': True, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3891)'}, u'worker_name': u'worker1', u'unit': u'Sync', u'stage': u'Running'}, {u'source_name': u'mysql-02', u'name': u'test-1', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 0, u'master_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-15', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3395)', u'syncer_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-15', u'synced': True, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3395)'}, u'worker_name': u'worker2', u'unit': u'Sync', u'stage': u'Running'}], u'name': u'test-1'}, {u'strict_optimistic_shard_mode': False, u'source_config': {u'incr_migrate_conf': {u'repl_batch': 100, u'repl_threads': 16}, u'full_migrate_conf': {u'data_dir': u'./dumped_data', u'export_threads': 4, u'import_threads': 16}, u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'replace', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'task2_target_table', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'task2_target_table', u'schema': u'openapi'}}], u'task_mode': u'all', u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'status_list': [{u'source_name': u'mysql-02', u'name': u'test-2', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 0, u'master_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-15', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3395)', u'syncer_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-15', u'synced': True, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3395)'}, u'worker_name': u'worker2', u'unit': u'Sync', u'stage': u'Running'}, {u'source_name': u'mysql-01', u'name': u'test-2', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 0, u'master_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-19', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3891)', u'syncer_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-19', u'synced': True, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 3891)'}, u'worker_name': u'worker1', u'unit': u'Sync', u'stage': u'Running'}], u'name': u'test-2'}]})
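[Editor's note] get_task_list_with_status returns every task together with its per-source status_list. A hedged sketch of walking that response, assuming a with_status query parameter on GET /api/v1/tasks (parameter name guessed from the helper name):

# Hypothetical sketch of "get_task_list_with_status".
import requests

resp = requests.get("http://127.0.0.1:8261/api/v1/tasks",
                    params={"with_status": "true"})  # assumed parameter
resp.raise_for_status()
tasks = resp.json()
for task in tasks["data"]:
    for st in task.get("status_list", []):
        sync = st.get("sync_status") or {}
        print(task["name"], st["source_name"], st["stage"],
              "synced=", sync.get("synced"))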
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: MULTI TASK SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: NO SHARD TASK
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker1', u'source_name': u'mysql-01'}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker2', u'source_name': u'mysql-02'}]})
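[Editor's note] The create_source1_success and get_source_status_success lines above suggest a two-step flow: register the upstream, then ask which dm-worker was bound to it. A sketch under the assumption that the routes are POST /api/v1/sources and GET /api/v1/sources/{name}/status:

# Hypothetical sketch of source registration and status lookup.
import requests

API = "http://127.0.0.1:8261/api/v1"  # assumed dm-master OpenAPI base
source = {
    "source_name": "mysql-02",
    "host": "127.0.0.1",
    "port": 3307,
    "user": "root",
    "password": "123456",
    "enable": True,
    "enable_gtid": False,
}
requests.post(f"{API}/sources", json=source).raise_for_status()

# The status response reports which dm-worker was bound to the new source.
status = requests.get(f"{API}/sources/mysql-02/status").json()
print(status["data"][0]["worker_name"])  # e.g. "worker2"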
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-no-shard'}})
dmctl test cmd: "query-status test-no-shard"
got=2 expected=2
dmctl test cmd: "query-status test-no-shard"
got=2 expected=2
check diff successfully
('get_task_status_failed resp=', {u'error_code': 46018, u'error_msg': u'[code=46018:class=scheduler:scope=internal:level=medium], Message: task with name not a task name not exist, Workaround: Please use `query-status` command to see tasks.'})
('get_task_status_success resp=', {u'total': 2, u'data': [{u'source_name': u'mysql-01', u'name': u'test-no-shard', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 5, u'master_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-23', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4679)', u'syncer_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-22', u'synced': False, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4425)'}, u'worker_name': u'worker1', u'unit': u'Sync', u'stage': u'Running'}, {u'source_name': u'mysql-02', u'name': u'test-no-shard', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 5, u'master_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-19', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4279)', u'syncer_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-18', u'synced': False, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4005)'}, u'worker_name': u'worker2', u'unit': u'Sync', u'stage': u'Running'}]})
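[Editor's note] Note the contrast between get_task_status_failed (error_code 46018 for a nonexistent task name) and get_task_status_success just above. A sketch of handling both outcomes, assuming a GET /api/v1/tasks/{name}/status route; the error code is copied from the log:

# Hypothetical sketch of "get_task_status_failed" / "get_task_status_success".
import requests

def get_task_status(name):
    r = requests.get(f"http://127.0.0.1:8261/api/v1/tasks/{name}/status")
    body = r.json()
    if "error_code" in body:  # e.g. 46018: task with that name does not exist
        raise RuntimeError(body["error_msg"])
    return body

status = get_task_status("test-no-shard")
assert status["total"] == len(status["data"])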
delete_source_with_force_success
('get_task_status_success resp=', {u'total': 1, u'data': [{u'source_name': u'mysql-02', u'name': u'test-no-shard', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 5, u'total_events': 5, u'master_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-19', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4279)', u'syncer_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-18', u'synced': False, u'total_tps': 5, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 4005)'}, u'worker_name': u'worker2', u'unit': u'Sync', u'stage': u'Running'}]})
('get_task_list resp=', {u'total': 1, u'data': [{u'strict_optimistic_shard_mode': False, u'source_config': {u'incr_migrate_conf': {u'repl_batch': 100, u'repl_threads': 16}, u'full_migrate_conf': {u'data_dir': u'./dumped_data', u'export_threads': 4, u'import_threads': 16}, u'source_conf': [{u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'replace', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-no-shard'}]})
('get_task_schema_success schema resp=', [u'openapi'])
('get_task_schema_success table resp=', [u't2'])
('get_task_schema_success create table resp=', {u'table_name': u't2', u'schema_create_sql': u'CREATE TABLE `t2` ( `i` tinyint(4) DEFAULT NULL, `j` int(11) DEFAULT NULL, UNIQUE KEY `j` (`j`)) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_bin', u'schema_name': u'openapi'})
('get_task_schema_success table resp=', [u't2'])
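[Editor's note] The three get_task_schema_success lines drill from schema names to table names to the CREATE TABLE statement. A speculative sketch of that drill-down; the nested /schemas routes are guesses modeled on the responses above:

# Hypothetical sketch of the schema drill-down behind "get_task_schema_success".
import requests

API = "http://127.0.0.1:8261/api/v1"
task, source = "test-no-shard", "mysql-02"

schemas = requests.get(f"{API}/tasks/{task}/sources/{source}/schemas").json()
for schema in schemas:                      # e.g. ['openapi']
    tables = requests.get(
        f"{API}/tasks/{task}/sources/{source}/schemas/{schema}").json()
    for table in tables:                    # e.g. ['t2']
        detail = requests.get(
            f"{API}/tasks/{task}/sources/{source}/schemas/{schema}/{table}").json()
        print(detail["schema_create_sql"])  # CREATE TABLE `t2` (...)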
('delete_task_failed resp=', {u'error_code': 49001, u'error_msg': u'[code=49001:class=openapi:scope=internal:level=high], Message: task test-no-shard have running subtasks, please stop them or delete task with force.'})
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
delete_source_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
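[Editor's note] The delete sequence above shows a plain task delete refused with error_code 49001 while subtasks run, then succeeding once forced. A sketch, assuming a force query parameter on DELETE (the parameter is an assumption; the error code is from the log):

# Hypothetical sketch of force-deleting a task with running subtasks.
import requests

API = "http://127.0.0.1:8261/api/v1"

r = requests.delete(f"{API}/tasks/test-no-shard")
if not r.ok and r.json().get("error_code") == 49001:
    # Running subtasks block a plain delete; retry with force.
    requests.delete(f"{API}/tasks/test-no-shard", params={"force": "true"})

requests.delete(f"{API}/sources/mysql-01")  # plain source delete succeeds afterwards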
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: NO SHARD TASK SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: TASK TEMPLATES
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('create_task_template_failed resp=', {u'error': u'error in openapi3filter.RequestError: request body has an error: doesn\'t match the schema: Error at "/shard_mode": value is not one of the allowed values'})
('create_task_template_success resp=', {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'name': u'test-1'})
('list_task_template resp=', {u'total': 1, u'data': [{u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'name': u'test-1'}]})
('get_task_template resp=', {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'name': u'test-1'})
('update_task_template_success resp=', {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'shard_mode': u'pessimistic', u'task_mode': u'full', u'name': u'test-1'})
delete_task_template
('list_task_template resp=', {u'total': 0, u'data': []})
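[Editor's note] The task-template lines above cover a full CRUD cycle, including one create rejected by OpenAPI schema validation for a bad shard_mode value. A sketch, assuming /api/v1/tasks/templates routes; the paths are guessed from the helper names:

# Hypothetical sketch of the task-template CRUD cycle.
import requests

API = "http://127.0.0.1:8261/api/v1"
template = {
    "name": "test-1",
    "task_mode": "all",
    "shard_mode": "pessimistic",  # an unsupported value here is rejected by schema validation
    "meta_schema": "dm-meta",
    # ... remaining fields as in the create_noshard_task payload above
}

requests.post(f"{API}/tasks/templates", json=template)            # create
print(requests.get(f"{API}/tasks/templates").json()["total"])     # list -> 1
template["task_mode"] = "full"
requests.put(f"{API}/tasks/templates/test-1", json=template)      # update
requests.delete(f"{API}/tasks/templates/test-1")                  # delete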
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-1'}})
dmctl test cmd: "query-status test-1"
got=2 expected=2
dmctl test cmd: "query-status test-1"
got=2 expected=2
('import_task_template resp=', {u'success_task_list': [u'test-1'], u'failed_task_list': []})
('list_task_template resp=', {u'total': 1, u'data': [{u'strict_optimistic_shard_mode': False, u'source_config': {u'incr_migrate_conf': {u'repl_batch': 100, u'repl_threads': 16}, u'full_migrate_conf': {u'data_dir': u'./dumped_data', u'export_threads': 4, u'import_threads': 16}, u'source_conf': [{u'source_name': u'mysql-02'}, {u'source_name': u'mysql-01'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'replace', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-1'}]})
dmctl test cmd: "config task test-1 --path /tmp/dm_test/openapi/get_task_from_task.yaml"
dmctl test cmd: "config task-template test-1 --path /tmp/dm_test/openapi/get_task_from_task_template.yaml"
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: TASK TEMPLATES SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: NO SHARD TASK DUMP STATUS
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:51:59 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:52:00 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker1', u'source_name': u'mysql-01'}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker2', u'source_name': u'mysql-02'}]})
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-no-shard-dump-status'}})
dmctl test cmd: "query-status test-no-shard-dump-status"
got=2 expected=2
dmctl test cmd: "query-status test-no-shard-dump-status"
got=2 expected=2
got=2 expected=2
('check_dump_status_success resp=', {u'total': 2, u'data': [{u'source_name': u'mysql-01', u'name': u'test-no-shard-dump-status', u'unresolved_ddl_lock_id': u'', u'worker_name': u'worker1', u'dump_status': {u'estimate_total_rows': 0, u'finished_bytes': 0, u'completed_tables': 0, u'total_tables': 0, u'finished_rows': 0}, u'unit': u'Dump', u'stage': u'Running'}, {u'source_name': u'mysql-02', u'name': u'test-no-shard-dump-status', u'unresolved_ddl_lock_id': u'', u'worker_name': u'worker2', u'dump_status': {u'estimate_total_rows': 0, u'finished_bytes': 0, u'completed_tables': 0, u'total_tables': 0, u'finished_rows': 0}, u'unit': u'Dump', u'stage': u'Running'}]})
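[Editor's note] check_dump_status_success exposes per-source Dump-unit progress counters. A hedged polling loop over those fields; the route and field names are assumed from the response above:

# Hypothetical polling loop over Dump-unit progress.
import time
import requests

def dump_finished(task):
    body = requests.get(
        f"http://127.0.0.1:8261/api/v1/tasks/{task}/status").json()
    for sub in body["data"]:
        if sub.get("unit") == "Dump":
            ds = sub.get("dump_status") or {}
            if ds.get("completed_tables", 0) < ds.get("total_tables", 0):
                return False
    return True

while not dump_finished("test-no-shard-dump-status"):
    time.sleep(1)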
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
('get_task_status_success_but_worker_meet_error resp=', {u'total': 2, u'data': [{u'source_name': u'mysql-01', u'name': u'test-no-shard-dump-status', u'worker_name': u'', u'error_msg': u'[code=38029:class=dm-master:scope=internal:level=high], Message: mysql-01 relevant worker-client not found, Workaround: Please use list-member command to see if the some workers are offline.', u'unit': u'', u'stage': u''}, {u'source_name': u'mysql-02', u'name': u'test-no-shard-dump-status', u'worker_name': u'', u'error_msg': u'[code=38029:class=dm-master:scope=internal:level=high], Message: mysql-02 relevant worker-client not found, Workaround: Please use list-member command to see if the some workers are offline.', u'unit': u'', u'stage': u''}]})

Usage:
 kill [options] <pid|name> [...]

Options:
 -a, --all              do not restrict the name-to-pid conversion to processes
                        with the same uid as the present process
 -s, --signal <sig>     send specified signal
 -q, --queue <sig>      use sigqueue(2) rather than kill(2)
 -p, --pid              print pids without signaling them
 -l, --list [=<signal>] list signal names, or convert one to a name
 -L, --table            list signal names and numbers

 -h, --help     display this help and exit
 -V, --version  output version information and exit

For more details see kill(1).
process dm-worker.test already exit
[Sun Apr 28 15:52:04 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:52:06 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: NO SHARD TASK DUMP STATUS SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: COMPLEX OPERATION
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-complex'}})
dmctl test cmd: "query-status test-complex"
got=2 expected=2
('get_task_list resp=', {u'total': 1, u'data': [{u'strict_optimistic_shard_mode': False, u'source_config': {u'incr_migrate_conf': {u'repl_batch': 100, u'repl_threads': 16}, u'full_migrate_conf': {u'data_dir': u'./dumped_data', u'export_threads': 4, u'import_threads': 16}, u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'replace', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-complex'}]})
dmctl test cmd: "query-status test-complex"
got=2 expected=2
check diff successfully
('get_task_status_success resp=', {u'total': 2, u'data': [{u'source_name': u'mysql-01', u'name': u'test-complex', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 5, u'master_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-33', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 6589)', u'syncer_binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-32', u'synced': False, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 6335)'}, u'worker_name': u'worker1', u'unit': u'Sync', u'stage': u'Running'}, {u'source_name': u'mysql-02', u'name': u'test-complex', u'unresolved_ddl_lock_id': u'', u'sync_status': {u'binlog_type': u'remote', u'blocking_ddls': None, u'recent_tps': 0, u'total_events': 5, u'master_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-29', u'seconds_behind_master': 0, u'master_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 6441)', u'syncer_binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-28', u'synced': False, u'total_tps': 0, u'unresolved_groups': None, u'syncer_binlog': u'(dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001, 6167)'}, u'worker_name': u'worker2', u'unit': u'Sync', u'stage': u'Running'}]})
check diff successfully
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: COMPLEX OPERATION SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: TEST TASK WITH IGNORE CHECK ITEMS
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker1', u'source_name': u'mysql-01'}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker2', u'source_name': u'mysql-02'}]})
('create_task_with_precheck resp=', {u'check_result': u'pre-check is passed. ', u'task': {u'name': u'test-no-ignore-no-error', u'table_migrate_rule': [{u'source': {u'table': u't*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}, {u'source': {u'table': u't*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}], u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'ignore_checking_items': [u'version', u''], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}}})
dmctl test cmd: "query-status test-no-ignore-no-error"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_task_with_precheck resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 21,\n\t\t\t\t"name": "sharding table `openapi`.`t` consistency checking",\n\t\t\t\t"desc": "check consistency of sharding table structures",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "fail",\n\t\t\t\t\t\t"short_error": "sourceID mysql-01 table {openapi t} of sharding `openapi`.`t` have auto-increment key, please make sure them don\'t conflict in target table!"\n\t\t\t\t\t},\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "fail",\n\t\t\t\t\t\t"short_error": "sourceID mysql-02 table {openapi t} of sharding `openapi`.`t` have auto-increment key, please make sure them don\'t conflict in target table!"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "If happen conflict, please handle it by yourself. You can refer to https://docs.pingcap.com/tidb-data-migration/stable/shard-merge-best-practices/#handle-conflicts-between-primary-keys-or-unique-indexes-across-multiple-sharded-tables",\n\t\t\t\t"extra": "auto-increment key checking"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 22,\n\t\t\t"successful": 21,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'name': u'test-no-ignore-has-warn', u'table_migrate_rule': [{u'source': {u'table': u't*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}, {u'source': {u'table': u't*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}], u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'ignore_checking_items': [u'version', u''], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}}})
dmctl test cmd: "query-status test-no-ignore-has-warn"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_task_with_precheck resp=', {u'error_code': 26005, u'error_msg': u'[code=26005:class=dm-master:scope=internal:level=medium], Message: fail to check synchronization configuration with type: check was failed, please see detail\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 21,\n\t\t\t\t"name": "sharding table `openapi`.`t` consistency checking",\n\t\t\t\t"desc": "check consistency of sharding table structures",\n\t\t\t\t"state": "fail",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "fail",\n\t\t\t\t\t\t"short_error": "column length mismatch (self: 3 vs other: 2)",\n\t\t\t\t\t\t"self": "sourceID mysql-01 table {openapi t} columns [id i j]",\n\t\t\t\t\t\t"other": "sourceID mysql-02 table `openapi`.`t` columns [i j]"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "please set same table structure for sharding tables",\n\t\t\t\t"extra": "error on sharding `openapi`.`t`"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": false,\n\t\t\t"total": 22,\n\t\t\t"successful": 21,\n\t\t\t"failed": 1,\n\t\t\t"warning": 0\n\t\t}\n\t}'})
dmctl test cmd: "query-status test-no-ignore-has-error"
got=1 expected=1
('create_task_with_precheck resp=', {u'check_result': u'pre-check is passed. ', u'task': {u'name': u'test-has-ignore-without-error', u'table_migrate_rule': [{u'source': {u'table': u't*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}, {u'source': {u'table': u't*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u't', u'schema': u'openapi'}}], u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'ignore_checking_items': [u'version', u'schema_of_shard_tables'], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'shard_mode': u'pessimistic', u'task_mode': u'all', u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}}})
dmctl test cmd: "query-status test-has-ignore-without-error"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
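[Editor's note] The four create_task_with_precheck scenarios above show ignore_checking_items turning the failing schema_of_shard_tables check into a pass. A sketch of such a payload; the field values are copied from the log, the endpoint is assumed:

# Hypothetical sketch of a precheck-aware task create with ignore_checking_items.
import requests

task = {
    "name": "test-has-ignore-without-error",
    "task_mode": "all",
    "shard_mode": "pessimistic",
    "meta_schema": "dm_meta",
    # Skip the version warning and the failing shard-table consistency check.
    "ignore_checking_items": ["version", "schema_of_shard_tables"],
    # ... table_migrate_rule / source_config / target_config as above
}
r = requests.post("http://127.0.0.1:8261/api/v1/tasks", json={"task": task})
print(r.json().get("check_result"))  # "pre-check is passed. "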
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: TEST TASK WITH IGNORE CHECK ITEMS SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: DELETE TASK WITH STOPPED DOWNSTREAM
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test-no-shard'}})
dmctl test cmd: "query-status test-no-shard"
got=2 expected=2
tidb_server_num tidb-server alive
wait process tidb-server exit...
process tidb-server already exit
('delete_task_failed resp=', {u'error_code': 10001, u'error_msg': u'[code=10001:class=database:scope=downstream:level=high], Message: database driver error, RawCause: dial tcp 127.0.0.1:4000: connect: connection refused, Workaround: Please check the database connection and the database config in configuration file.'})
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
Starting TiDB on port 4000
Verifying TiDB is started...
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	False	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
ERROR 1396 (HY000) at line 1: Operation CREATE USER failed for 'test'@'%'
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: DELETE TASK WITH STOPPED DOWNSTREAM SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: START TASK WITH CONDITION
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker1', u'source_name': u'mysql-01'}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker2', u'source_name': u'mysql-02'}]})
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 9,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 18,\n\t\t\t"successful": 17,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_no_source_meta'}})
dmctl test cmd: "query-status incremental_task_no_source_meta"
got=2 expected=2
('start_task_failed resp=', {u'error_code': 20022, u'error_msg': u'[code=20022:class=config:scope=internal:level=medium], Message: mysql-instance(mysql-01) must set meta for task-mode incremental, Workaround: Please check the `meta` config in task configuration file.'})
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 10,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 20,\n\t\t\t"successful": 19,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01', u'binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-58', u'binlog_name': u'dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001', u'binlog_pos': 11460}, {u'source_name': u'mysql-02', u'binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-54', u'binlog_name': u'dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001', u'binlog_pos': 11906}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_use_gtid'}})
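[Editor's note] In incremental mode each source must carry its own binlog meta, otherwise starting the task fails with code 20022 as shown above. The source_conf below is copied verbatim from the create_incremental_task_with_gtid_success payload:

# source_conf for an incremental task, with per-source binlog coordinates.
source_config = {
    "source_conf": [
        {
            "source_name": "mysql-01",
            "binlog_name": "dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001",
            "binlog_pos": 11460,
            "binlog_gtid": "37937a44-0531-11ef-a93f-369273bbfa41:1-58",
        },
        {
            "source_name": "mysql-02",
            "binlog_name": "dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001",
            "binlog_pos": 11906,
            "binlog_gtid": "37e89242-0531-11ef-b772-369273bbfa41:1-54",
        },
    ]
}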
dmctl test cmd: "query-status incremental_task_use_gtid"
got=2 expected=2
dmctl test cmd: "query-status incremental_task_use_gtid"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 9,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 18,\n\t\t\t"successful": 17,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_use_start_time'}})
dmctl test cmd: "query-status incremental_task_use_start_time"
got=2 expected=2
start_task_with_condition success
dmctl test cmd: "query-status incremental_task_use_start_time"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 9,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 18,\n\t\t\t"successful": 17,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_use_start_time_after_create'}})
dmctl test cmd: "query-status incremental_task_use_start_time_after_create"
got=2 expected=2
start_task_with_condition success
dmctl test cmd: "query-status incremental_task_use_start_time_after_create"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 10,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 20,\n\t\t\t"successful": 19,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01', u'binlog_gtid': u'37937a44-0531-11ef-a93f-369273bbfa41:1-70', u'binlog_name': u'dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001', u'binlog_pos': 13696}, {u'source_name': u'mysql-02', u'binlog_gtid': u'37e89242-0531-11ef-b772-369273bbfa41:1-66', u'binlog_name': u'dm-it-8ad00964-919c-4e5f-b41c-63b6b7bf8e3c-7klxz-982f1-bin.000001', u'binlog_pos': 14442}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_both_gtid_start_time'}})
dmctl test cmd: "query-status incremental_task_both_gtid_start_time"
got=2 expected=2
start_task_with_condition success
dmctl test cmd: "query-status incremental_task_both_gtid_start_time"
got=2 expected=2
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:52:40 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:52:41 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
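list_source_by_openapi_success shows that source listings mask passwords as '******' and include each source's relay and purge configuration. A sketch of the listing call, assuming GET /api/v1/sources on the master:

    import requests

    resp = requests.get("http://127.0.0.1:8261/api/v1/sources")  # assumed endpoint
    resp.raise_for_status()
    for src in resp.json()["data"]:
        # Passwords come back masked; relay is disabled by default in this test.
        print(src["source_name"], src["host"], src["port"],
              src["relay_config"]["enable_relay"])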
('create_incremental_task_with_gtid_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 9,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 18,\n\t\t\t"successful": 17,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm_meta', u'task_mode': u'incremental', u'name': u'incremental_task_no_duration_but_error'}})
dmctl test cmd: "query-status incremental_task_no_duration_but_error"
got=2 expected=2
start_task_with_condition success
dmctl test cmd: "query-status incremental_task_no_duration_but_error"
got=2 expected=2
start_task_with_condition success
run tidb sql failed 1-th time, retry later
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: START TASK WITH CONDITION SUCCESS
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: STOP TASK WITH CONDITION
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_by_openapi_success resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker1', u'source_name': u'mysql-01'}]})
('create_source1_success resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3307})
('list_source_by_openapi_success resp=', {u'total': 2, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}, {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-02', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3307}]})
('get_source_status_success resp=', {u'total': 1, u'data': [{u'worker_name': u'worker2', u'source_name': u'mysql-02'}]})
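create_source1_success registers each upstream MySQL with the master; note the response echoes the plaintext password, while later list calls mask it. A sketch of the registration, assuming POST /api/v1/sources; whether the body is the bare source object (as below) or a {"source": ...} envelope varies by DM version and is not visible in this log:

    import requests

    source = {
        "source_name": "mysql-01",
        "host": "127.0.0.1",
        "port": 3306,
        "user": "root",
        "password": "123456",
        "enable": True,
        "enable_gtid": False,
    }
    # Body shape is an assumption; see the note above.
    resp = requests.post("http://127.0.0.1:8261/api/v1/sources", json=source)
    resp.raise_for_status()
    print("create source resp=", resp.json())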
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:52:53 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:52:54 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
('create_noshard_task_success resp=', {u'check_result': u'fail to check synchronization configuration with type: no errors but some warnings\n detail: {\n\t\t"results": [\n\t\t\t{\n\t\t\t\t"id": 13,\n\t\t\t\t"name": "mysql_version",\n\t\t\t\t"desc": "check whether mysql version is satisfied",\n\t\t\t\t"state": "warn",\n\t\t\t\t"errors": [\n\t\t\t\t\t{\n\t\t\t\t\t\t"severity": "warn",\n\t\t\t\t\t\t"short_error": "version suggested earlier than 8.0.0 but got 8.0.21"\n\t\t\t\t\t}\n\t\t\t\t],\n\t\t\t\t"instruction": "It is recommended that you select a database version that meets the requirements before performing data migration. Otherwise data inconsistency or task exceptions might occur.",\n\t\t\t\t"extra": "address of db instance - 127.0.0.1:3307"\n\t\t\t}\n\t\t],\n\t\t"summary": {\n\t\t\t"passed": true,\n\t\t\t"total": 23,\n\t\t\t"successful": 22,\n\t\t\t"failed": 0,\n\t\t\t"warning": 1\n\t\t}\n\t}', u'task': {u'source_config': {u'source_conf': [{u'source_name': u'mysql-01'}, {u'source_name': u'mysql-02'}]}, u'enhance_online_schema_change': True, u'on_duplicate': u'error', u'table_migrate_rule': [{u'source': {u'table': u'*', u'source_name': u'mysql-01', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}, {u'source': {u'table': u'*', u'source_name': u'mysql-02', u'schema': u'openapi'}, u'target': {u'table': u'', u'schema': u'openapi'}}], u'target_config': {u'user': u'root', u'host': u'127.0.0.1', u'password': u'', u'port': 4000, u'security': None}, u'meta_schema': u'dm-meta', u'task_mode': u'all', u'name': u'test_wait_time_on_stop'}})
dmctl test cmd: "query-status test_wait_time_on_stop"
got=2 expected=2
dmctl test cmd: "query-status test_wait_time_on_stop"
got=2 expected=2
check diff successfully
error check
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:52:57 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:52:58 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
delete_task_success
('get_task_list resp=', {u'total': 0, u'data': []})
delete_source_with_force_success
delete_source_with_force_success
('list_source_by_openapi_success resp=', {u'total': 0, u'data': []})
('get_task_list resp=', {u'total': 0, u'data': []})
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: STOP TASK WITH CONDITION SUCCESS

>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>START TEST OPENAPI: REVERSE HTTPS
2 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:53:09 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /tmp/dm_test/openapi/dm-master1.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:53:10 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /tmp/dm_test/openapi/dm-master2.toml >>>>>>
wait for rpc addr 127.0.0.1:8361 alive the 1-th time
wait for rpc addr 127.0.0.1:8361 alive the 2-th time
rpc addr 127.0.0.1:8361 is alive
[Sun Apr 28 15:53:20 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /tmp/dm_test/openapi/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:53:21 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /tmp/dm_test/openapi/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
('create_source_success_https resp=', {u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'host': u'127.0.0.1', u'user': u'root', u'security': None, u'password': u'123456', u'port': 3306})
('list_source_success_https resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
('list_source_with_reverse_https resp=', {u'total': 1, u'data': [{u'enable_gtid': False, u'enable': True, u'source_name': u'mysql-01', u'security': None, u'relay_config': {u'relay_binlog_name': u'', u'relay_dir': u'relay-dir', u'relay_binlog_gtid': u'', u'enable_relay': False}, u'purge': {u'remain_space': 15, u'expires': 0, u'interval': 3600}, u'host': u'127.0.0.1', u'user': u'root', u'flavor': u'mysql', u'password': u'******', u'port': 3306}]})
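The REVERSE HTTPS case repeats the source listing over TLS, including via the non-leader master, which reverse-proxies the request to the leader. A sketch of a mutual-TLS listing with requests; the certificate paths are hypothetical stand-ins for the files in the test's conf directory:

    import requests

    CA = "/tmp/dm_test/openapi/ca.pem"           # hypothetical path
    CERT = "/tmp/dm_test/openapi/client.pem"     # hypothetical path
    KEY = "/tmp/dm_test/openapi/client-key.pem"  # hypothetical path

    resp = requests.get(
        "https://127.0.0.1:8261/api/v1/sources",
        verify=CA,          # validate the master's server certificate
        cert=(CERT, KEY),   # present a client certificate for mutual TLS
    )
    print("list_source over https resp=", resp.json())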
2 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:53:27 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-master1.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:53:29 CST 2024] <<<<<< START DM-MASTER on port 8361, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-master2.toml >>>>>>
wait for rpc addr 127.0.0.1:8361 alive the 1-th time
wait for rpc addr 127.0.0.1:8361 alive the 2-th time
rpc addr 127.0.0.1:8361 is alive
[Sun Apr 28 15:53:38 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:53:39 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/openapi/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>TEST OPENAPI: REVERSE HTTPS
('list_master_success resp=', {u'total': 2, u'data': [{u'name': u'master1', u'addr': u'http://127.0.0.1:8291', u'alive': True, u'leader': True}, {u'name': u'master2', u'addr': u'http://127.0.0.1:8292', u'alive': True, u'leader': False}]})
('list_worker_success resp=', {u'total': 2, u'data': [{u'bound_source_name': u'', u'bound_stage': u'free', u'addr': u'127.0.0.1:8262', u'name': u'worker1'}, {u'bound_source_name': u'', u'bound_stage': u'free', u'addr': u'127.0.0.1:8263', u'name': u'worker2'}]})
('delete_master_failed resp=', {u'error_code': 0, u'error_msg': u'etcdserver: unhealthy cluster'}, 'retry cnt=', 0)
delete_master_with_retry_success
('list_master_success resp=', {u'total': 1, u'data': [{u'name': u'master1', u'addr': u'http://127.0.0.1:8291', u'alive': True, u'leader': True}]})
('delete_worker_failed resp=', {u'error_code': 46005, u'error_msg': u'[code=46005:class=scheduler:scope=internal:level=medium], Message: dm-worker with name worker1 is still online, Workaround: Please shut it down first.'})
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
delete_worker_with_retry_success
('list_worker_success resp=', {u'total': 1, u'data': [{u'bound_source_name': u'', u'bound_stage': u'offline', u'addr': u'127.0.0.1:8263', u'name': u'worker2'}]})
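Member removal is deliberately retried: deleting a master right after one is stopped can hit 'etcdserver: unhealthy cluster', and deleting a worker returns error 46005 until its process is actually down, which is why the script waits for dm-worker.test to exit before delete_worker_with_retry_success. A sketch of the retry loop, assuming DELETE /api/v1/cluster/workers/{name}:

    import time
    import requests

    MASTER = "http://127.0.0.1:8261"  # assumed leader OpenAPI address

    def delete_worker_with_retry(name, attempts=10):
        # Rejected with error 46005 while the worker is still online, so keep
        # retrying once the worker process has been shut down.
        for i in range(attempts):
            resp = requests.delete(MASTER + "/api/v1/cluster/workers/" + name)
            if resp.ok:
                return
            print("delete_worker_failed resp=", resp.json(), "retry cnt=", i)
            time.sleep(2)
        raise RuntimeError("worker %s still online after %d attempts" % (name, attempts))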
2 dm-master alive
0 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:53:54 CST 2024] <<<<<< test case openapi success! >>>>>>
start running case: [s3_dumpling_lightning] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
Starting PD...
Release Version: v7.5.1-5-g584533652
Edition: Community
Git Commit Hash: 58453365285465cd90bc4472cff2bad7ce4d764b
Git Branch: release-7.5
UTC Build Time:  2024-04-03 10:04:14
curl: (7) Failed connect to 127.0.0.1:2379; Connection refused
2024-04-28 15:53:54.901012 W | pkg/fileutil: check file permission: directory "/tmp/dm_test/s3_dumpling_lightning.downstream/pd" exist, but the permission is "drwxr-xr-x". The recommended permission is "-rwx------" to prevent possible unprivileged access to the data.
  "is_initialized": true,
Starting TiDB...
curl: (7) Failed connect to 127.0.0.1:10080; Connection refused
curl: (7) Failed connect to 127.0.0.1:10080; Connection refused
{"connections":0,"version":"8.0.11-TiDB-v7.5.1-44-g0359bbcf43","git_hash":"0359bbcf434fd14d0a4654f35103005de875c7d1"}process minio already exit
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8688; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:8688

Browser Access:
   http://127.0.0.1:8688

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8688 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8688
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CA62723C6558D9
< X-Xss-Protection: 1; mode=block
< Date: Sun, 28 Apr 2024 07:54:10 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
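The curl exchange above is the script's MinIO readiness probe: connection refused means the server is not listening yet, while any HTTP answer, even this 403 Forbidden for an unsigned request, counts as alive. A sketch of such a poll:

    import time
    import requests

    def wait_minio_alive(url="http://127.0.0.1:8688", attempts=30):
        for _ in range(attempts):
            try:
                requests.get(url, timeout=1)  # any status code (403 included) means alive
                return
            except requests.ConnectionError:
                time.sleep(1)  # connection refused: not listening yet
        raise RuntimeError("minio did not come up")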

process dm-master.test already exit

process dm-worker.test already exit
[Sun Apr 28 15:54:10 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:54:12 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:54:13 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source2.yaml"
prepare source data
start task
dmctl test cmd: "start-task /tmp/dm_test/s3_dumpling_lightning/dm-task.yaml --remove-meta"
check task result
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
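The repeated "run tidb sql failed N-th time, retry later" lines are expected: the dump to S3 and the Lightning import complete asynchronously after start-task, so the downstream check is polled until it passes. A sketch of that polling, assuming the pymysql driver and a hypothetical check query (the real harness may shell out to the mysql client instead):

    import time
    import pymysql  # assumed driver for the sketch

    def check_downstream(expected, attempts=10):
        conn = pymysql.connect(host="127.0.0.1", port=4000, user="root", password="")
        try:
            for i in range(1, attempts + 1):
                with conn.cursor() as cur:
                    cur.execute("SELECT COUNT(1) FROM s3_dumpling_lightning.t1")  # hypothetical query
                    if cur.fetchone()[0] == expected:
                        return
                print("run tidb sql failed %d-th time, retry later" % i)
                time.sleep(2)
            raise RuntimeError("downstream rows never reached %d" % expected)
        finally:
            conn.close()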
wait process minio exit...
process minio already exit
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh: line 49: 63376 Killed                  bin/minio server --address $S3_ENDPOINT "$s3_DBPATH"
run s3 test with check dump files success
process minio already exit
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8688; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:8688

Browser Access:
   http://127.0.0.1:8688

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8688 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8688
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CA627747E7CDEA
< X-Xss-Protection: 1; mode=block
< Date: Sun, 28 Apr 2024 07:54:32 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:54:39 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:54:40 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:54:41 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source2.yaml"
prepare source data
start task
dmctl test cmd: "start-task /tmp/dm_test/s3_dumpling_lightning/dm-task.yaml --remove-meta"
check task result
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
wait process minio exit...
process minio already exit
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh: line 49: 63997 Killed                  bin/minio server --address $S3_ENDPOINT "$s3_DBPATH"
run s3 test without check dump files success
process minio already exit
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8688; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:8688

Browser Access:
   http://127.0.0.1:8688

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8688 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8688
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CA627E02B95B90
< X-Xss-Protection: 1; mode=block
< Date: Sun, 28 Apr 2024 07:55:01 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:55:08 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:55:09 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:55:10 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source2.yaml"
prepare source data
start task
dmctl test cmd: "start-task /tmp/dm_test/s3_dumpling_lightning/dm-task.yaml --remove-meta"
check task result
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
wait process minio exit...
process minio already exit
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh: line 49: 64659 Killed                  bin/minio server --address $S3_ENDPOINT "$s3_DBPATH"
run s3 test with special task-name and check dump files success
process minio already exit
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8688; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:8688

Browser Access:
   http://127.0.0.1:8688

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8688 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8688
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CA6284BDEE1C7F
< X-Xss-Protection: 1; mode=block
< Date: Sun, 28 Apr 2024 07:55:30 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:55:37 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:55:39 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:55:40 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source2.yaml"
prepare source data
start task
dmctl test cmd: "start-task /tmp/dm_test/s3_dumpling_lightning/dm-task.yaml --remove-meta"
check task result
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
run tidb sql failed 1-th time, retry later
run tidb sql failed 2-th time, retry later
run tidb sql failed 3-th time, retry later
wait process minio exit...
process minio already exit
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh: line 49: 65335 Killed                  bin/minio server --address $S3_ENDPOINT "$s3_DBPATH"
run s3 test with special task-name and without check dump files success
process minio already exit
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connection refused
* Failed connect to 127.0.0.1:8688; Connection refused
* Closing connection 0

 You are running an older version of MinIO released 3 years ago 
 Update: Run `mc admin update` 


Attempting encryption of all config, IAM users and policies on MinIO backend
Endpoint:  http://127.0.0.1:8688

Browser Access:
   http://127.0.0.1:8688

Object API (Amazon S3 compatible):
   Go:         https://docs.min.io/docs/golang-client-quickstart-guide
   Java:       https://docs.min.io/docs/java-client-quickstart-guide
   Python:     https://docs.min.io/docs/python-client-quickstart-guide
   JavaScript: https://docs.min.io/docs/javascript-client-quickstart-guide
   .NET:       https://docs.min.io/docs/dotnet-client-quickstart-guide
* About to connect() to 127.0.0.1 port 8688 (#0)
*   Trying 127.0.0.1...
* Connected to 127.0.0.1 (127.0.0.1) port 8688 (#0)
> GET / HTTP/1.1
> User-Agent: curl/7.29.0
> Host: 127.0.0.1:8688
> Accept: */*
> 
< HTTP/1.1 403 Forbidden
< Accept-Ranges: bytes
< Content-Length: 226
< Content-Security-Policy: block-all-mixed-content
< Content-Type: application/xml
< Server: MinIO/RELEASE.2020-07-27T18-37-02Z
< Vary: Origin
< X-Amz-Request-Id: 17CA628BBBB5441D
< X-Xss-Protection: 1; mode=block
< Date: Sun, 28 Apr 2024 07:56:00 GMT
< 
{ [data not shown]
* Connection #0 to host 127.0.0.1 left intact
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
[Sun Apr 28 15:56:07 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:56:09 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:56:10 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/s3_dumpling_lightning/source2.yaml"
prepare source data
start task
dmctl test cmd: "start-task /tmp/dm_test/s3_dumpling_lightning/dm-task.yaml"
error check
wait process minio exit...
process minio already exit
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/s3_dumpling_lightning/run.sh: line 49: 66012 Killed                  bin/minio server --address $S3_ENDPOINT "$s3_DBPATH"
run s3 test error check success
Starting TiDB on port 4000
Verifying TiDB is started...
ERROR 2013 (HY000): Lost connection to MySQL server at 'reading initial communication packet', system error: 104
VARIABLE_NAME	VARIABLE_VALUE	COMMENT
bootstrapped	True	Bootstrap flag. Do not delete.
tidb_server_version	179	Bootstrap version. Do not delete.
system_tz	Asia/Shanghai	TiDB Global System Timezone.
new_collation_enabled	True	If the new collations are enabled. Do not edit it.
ddl_table_version	3	DDL Table Version. Do not delete.
1 dm-master alive
0 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:56:27 CST 2024] <<<<<< test case s3_dumpling_lightning success! >>>>>>
start running case: [sequence_sharding_optimistic] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:56:28 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:56:29 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:56:30 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_optimistic/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_optimistic/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-task.yaml --remove-meta"
check diff successfully
ERROR 1146 (42S02) at line 1: Table 'sharding_seq_tmp.t1' doesn't exist
run tidb sql failed 1-th time, retry later
dmctl test cmd: "pause-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=2 expected=2
dmctl test cmd: "resume-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=3 expected=3
restart dm-worker1
wait process dm-worker1 exit...
wait process dm-worker1 exit...
process dm-worker1 already exit
[Sun Apr 28 15:56:39 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_optimistic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "shard-ddl-lock unlock non-exist-task-`test_db`.`test_table`"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=1 expected=1
dmctl test cmd: "resume-task sequence_sharding_optimistic"
got=3 expected=3
check diff successfully
dmctl test cmd: "pause-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=2 expected=2
dmctl test cmd: "binlog-schema list -s mysql-replica-01,mysql-replica-02 sequence_sharding_optimistic sharding_seq_opt t2"
dmctl test cmd: "binlog-schema delete -s mysql-replica-01 sequence_sharding_optimistic sharding_seq_opt t2"
dmctl test cmd: "binlog-schema update -s mysql-replica-01 sequence_sharding_optimistic sharding_seq_opt t1 /tmp/dm_test/sequence_sharding_optimistic/schema.sql"
{
  "result": true,
  "msg": "",
  "sources": [
    {
      "result": true,
      "msg": "CREATE TABLE `t1` ( `id` bigint(20) NOT NULL, `c2` varchar(20) DEFAULT NULL, `c3` bigint(11) DEFAULT NULL, PRIMARY KEY (`id`) /*T![clustered_index] CLUSTERED */) ENGINE=InnoDB DEFAULT CHARSET=latin1 COLLATE=latin1_bin",
      "source": "mysql-replica-01",
      "worker": "worker1"
    }
  ]
}
dmctl test cmd: "resume-task sequence_sharding_optimistic"
got=3 expected=3
dmctl test cmd: "query-status sequence_sharding_optimistic"
got=3 expected=3
check diff successfully
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:56:48 CST 2024] <<<<<< test case sequence_sharding_optimistic success! >>>>>>
start running case: [sequence_sharding_removemeta] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:56:48 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:56:51 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:56:52 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_removemeta/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/sequence_sharding_removemeta/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/conf/dm-task.yaml "
check diff successfully
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "query-status sequence_sharding_removemeta"
got=0 expected=0
got=0 expected=0
dmctl test cmd: "stop-task sequence_sharding_removemeta"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sequence_sharding_removemeta/conf/dm-task.yaml --remove-meta"
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
check diff successfully
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "query-status sequence_sharding_removemeta"
got=1 expected=1
dmctl test cmd: "stop-task sequence_sharding_removemeta"
dmctl test cmd: "shard-ddl-lock unlock sequence_sharding_removemeta-`sharding_target3`.`t_target`"
dmctl test cmd: "shard-ddl-lock"
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:57:20 CST 2024] <<<<<< test case sequence_sharding_removemeta success! >>>>>>
start running case: [shardddl_optimistic] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:57:20 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:57:21 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl_optimistic/source1.yaml"
[Sun Apr 28 15:57:22 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/shardddl_optimistic/source2.yaml"
[Sun Apr 28 15:57:25 CST 2024] <<<<<< start DM-DIFFERENT_SCHEMA_FULL optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
check diff successfully
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:57:27 CST 2024] <<<<<< finish DM-DIFFERENT_SCHEMA_FULL optimistic >>>>>>
[Sun Apr 28 15:57:27 CST 2024] <<<<<< start DM-DIFFERENT_SCHEMA_INCREMENTAL optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "pause-task test"
dmctl test cmd: "stop-task test"
dmctl test cmd: "start-task /tmp/dm_test/shardddl_optimistic/task.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "operate-schema set -s mysql-replica-01 test -d shardddl1 -t tb1 /tmp/dm_test/shardddl_optimistic/schema11.sql"
dmctl test cmd: "operate-schema set -s mysql-replica-01 test -d shardddl1 -t tb2 /tmp/dm_test/shardddl_optimistic/schema12.sql"
dmctl test cmd: "operate-schema set -s mysql-replica-02 test -d shardddl1 -t tb1 /tmp/dm_test/shardddl_optimistic/schema21.sql"
dmctl test cmd: "operate-schema set -s mysql-replica-02 test -d shardddl1 -t tb2 /tmp/dm_test/shardddl_optimistic/schema22.sql"
dmctl test cmd: "resume-task test"
run tidb sql failed 1-th time, retry later
check diff successfully
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:57:34 CST 2024] <<<<<< finish DM-DIFFERENT_SCHEMA_INCREMENTAL optimistic >>>>>>
[Sun Apr 28 15:57:34 CST 2024] <<<<<< start DM-RESTART_TASK_MASTER_WORKER optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
check log contain failed 1-th time, retry later
restart worker1
restart dm-worker1
wait process worker1 exit...
wait process worker1 exit...
process worker1 already exit
[Sun Apr 28 15:57:39 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
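The restart sequence just logged follows a fixed pattern: stop a worker, wait for its process to exit, start it again, then poll the RPC port until it answers. A minimal sketch of that pattern, assuming hypothetical helper and binary invocations rather than the harness's real functions:

    restart_worker() {
        local name=$1 port=$2 conf=$3
        pkill -f "$name"
        # Wait until the old process is really gone.
        while pgrep -f "$name" >/dev/null; do
            echo "wait process $name exit..."
            sleep 1
        done
        # Start the worker again and poll its RPC port.
        dm-worker --config "$conf" >/dev/null 2>&1 &
        until nc -z 127.0.0.1 "$port" 2>/dev/null; do
            echo "wait for rpc addr 127.0.0.1:$port alive"
            sleep 1
        done
        echo "rpc addr 127.0.0.1:$port is alive"
    }

    restart_worker worker1 8262 conf/dm-worker1.toml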
check log contain failed 1-th time, retry later
restart worker2
restart dm-worker2
wait process worker2 exit...
wait process worker2 exit...
process worker2 already exit
[Sun Apr 28 15:57:44 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
check log contain failed 1-th time, retry later
restart worker2
restart dm-worker2
wait process worker2 exit...
wait process worker2 exit...
process worker2 already exit
[Sun Apr 28 15:57:49 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
check log contain failed 1-th time, retry later
restart master
restart dm-master
wait process dm-master exit...
wait process dm-master exit...
process dm-master already exit
[Sun Apr 28 15:57:57 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
check diff successfully
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:58:00 CST 2024] <<<<<< finish DM-RESTART_TASK_MASTER_WORKER optimistic >>>>>>
[Sun Apr 28 15:58:00 CST 2024] <<<<<< start DM-STOP_TASK_FOR_A_SOURCE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
run tidb sql failed 1-th time, retry later
dmctl test cmd: "stop-task test -s mysql-replica-02"
run tidb sql failed 1-th time, retry later
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml -s mysql-replica-02"
check diff successfully
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:58:08 CST 2024] <<<<<< finish DM-STOP_TASK_FOR_A_SOURCE optimistic >>>>>>
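The DM-STOP_TASK_FOR_A_SOURCE case above exercises per-source task control: the -s flag limits stop-task and start-task to a single upstream source while the task keeps running on the other source. A sketch of that pair of commands (the master address is an assumption and the task config path is abbreviated):

    # Detach only mysql-replica-02 from the running task "test".
    dmctl --master-addr 127.0.0.1:8261 stop-task test -s mysql-replica-02
    # Re-attach the source later with the same task configuration.
    dmctl --master-addr 127.0.0.1:8261 start-task \
        double-source-optimistic.yaml -s mysql-replica-02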
[Sun Apr 28 15:58:08 CST 2024] <<<<<< start DM-UPDATE_BA_ROUTE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
run tidb sql failed 1-th time, retry later
dmctl test cmd: "stop-task test"
dmctl test cmd: "start-task /tmp/dm_test/shardddl_optimistic/task.yaml"
dmctl test cmd: "show-ddl-locks"
got=1 expected=1
run tidb sql failed 1-th time, retry later
run tidb sql failed 1-th time, retry later
dmctl test cmd: "stop-task test"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml"
dmctl test cmd: "query-status test"
got=2 expected=2
dmctl test cmd: "resume-task test"
check diff successfully
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:58:19 CST 2024] <<<<<< finish DM-UPDATE_BA_ROUTE optimistic >>>>>>
[Sun Apr 28 15:58:19 CST 2024] <<<<<< start DM-CREATE_DROP_TABLE optimistic >>>>>>
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/shardddl_optimistic/conf/double-source-optimistic.yaml --remove-meta"
dmctl test cmd: "query-status test"
got=2 expected=2
run tidb sql failed 1-th time, retry later
run tidb sql failed 1-th time, retry later
dmctl test cmd: "show-ddl-locks"
dmctl test cmd: "stop-task test"
[Sun Apr 28 15:58:25 CST 2024] <<<<<< finish DM-CREATE_DROP_TABLE optimistic >>>>>>
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:58:30 CST 2024] <<<<<< test case shardddl_optimistic success! >>>>>>
start running case: [slow_relay_writer] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/slow_relay_writer/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/slow_relay_writer/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:58:31 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/slow_relay_writer/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:58:32 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/slow_relay_writer/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/slow_relay_writer/source1.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/slow_relay_writer/conf/dm-task.yaml "
check diff successfully
start incremental_data
finish incremental_data
dmctl test cmd: "query-status test"
got=1 expected=1
check diff successfully
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:58:50 CST 2024] <<<<<< test case slow_relay_writer success! >>>>>>
start running case: [sql_mode] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:58:50 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:58:51 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:58:52 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/sql_mode/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/sql_mode/source2.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sql_mode/conf/dm-task.yaml "
check diff successfully
check diff successfully
check diff successfully
check diff successfully
check diff successfully
ERROR 1146 (42S02) at line 1: Table 'sql_mode.t0' doesn't exist
run tidb sql failed 1-th time, retry later
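The ERROR 1146 above is transient: the check queries sql_mode.t0 on TiDB before the syncer has created it, so the harness retries. A sketch of the retry helper behind the "run tidb sql failed N-th time, retry later" lines; the helper name is illustrative, though the connection parameters match the target_config in this log:

    run_sql_with_retry() {
        local sql=$1 retries=${2:-10}
        local i
        for ((i = 1; i <= retries; i++)); do
            # Succeed as soon as the statement runs cleanly on TiDB.
            if mysql -h127.0.0.1 -P4000 -uroot -e "$sql"; then
                return 0
            fi
            echo "run tidb sql failed $i-th time, retry later"
            sleep 2
        done
        return 1
    }

    run_sql_with_retry "SELECT COUNT(*) FROM sql_mode.t0"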
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:04 CST 2024] <<<<<< test case sql_mode success! >>>>>>
start running case: [sync_collation] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sync_collation/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sync_collation/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:04 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sync_collation/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:59:05 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sync_collation/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
[Sun Apr 28 15:59:06 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/sync_collation/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/sync_collation/source1.yaml"
dmctl test cmd: "operate-source create /tmp/dm_test/sync_collation/source2.yaml"
prepare data
start task
dmctl test cmd: "start-task /tmp/dm_test/sync_collation/dm-task.yaml --remove-meta"
check full phase
prepare incremental data
check incremental phase
run tidb sql failed 1-th time, retry later
dmctl test cmd: "stop-task sync_collation"
prepare data for full phase error test
dmctl test cmd: "start-task /tmp/dm_test/sync_collation/dm-task.yaml --remove-meta"
check full phase error
dmctl test cmd: "query-status sync_collation"
got=1 expected=1
dmctl test cmd: "stop-task sync_collation"
prepare data for incremental phase error test
dmctl test cmd: "start-task /tmp/dm_test/sync_collation/dm-task.yaml --remove-meta"
check incremental phase error
dmctl test cmd: "query-status sync_collation"
got=1 expected=1
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:20 CST 2024] <<<<<< test case sync_collation success! >>>>>>
start running case: [tracker_ignored_ddl] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/run.sh...
Verbose mode = false
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:20 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:59:22 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-worker.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/tracker_ignored_ddl/source1.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-task.yaml "
check diff successfully
increment1 check success
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "resume-task test"
dmctl test cmd: "resume-task test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
increment2 check success
dmctl test cmd: "stop-relay -s mysql-replica-01"
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "stop-task test"
dmctl test cmd: "operate-source stop /tmp/dm_test/tracker_ignored_ddl/source1.yaml"
[Sun Apr 28 15:59:28 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-master.toml >>>>>>
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:59:28 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-worker.toml >>>>>>
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /tmp/dm_test/tracker_ignored_ddl/source1.yaml"
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/tracker_ignored_ddl/conf/dm-task.yaml "
check diff successfully
increment1 check success
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "resume-task test"
dmctl test cmd: "resume-task test"
got=2 expected=2
dmctl test cmd: "query-status test"
got=2 expected=2
increment2 check success
dmctl test cmd: "stop-relay -s mysql-replica-01"
dmctl test cmd: "query-status test"
got=1 expected=1
dmctl test cmd: "stop-task test"
dmctl test cmd: "operate-source stop /tmp/dm_test/tracker_ignored_ddl/source1.yaml"
1 dm-master alive
1 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:39 CST 2024] <<<<<< test case tracker_ignored_ddl success! >>>>>>
start running case: [validator_basic] script: [/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/run.sh]
Running test /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/run.sh...
Verbose mode = false
--> full mode, check we validate different data types(gtid=false, relay=false)
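This banner, and the three matching ones later in the case, come from a parameter sweep: validator_basic repeats the same checks for every combination of GTID and relay-log settings. A sketch of such a matrix loop; the loop structure is an assumption about run.sh, not quoted from it:

    for gtid in false true; do
        for relay in false true; do
            echo "--> full mode, check we validate different data types(gtid=$gtid, relay=$relay)"
            # clean up the cluster, start dm-master and dm-workers,
            # create the source with enable-gtid=$gtid, optionally
            # start relay, then run the task and check
            # "validation status" output.
        done
    done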
0 dm-master alive
0 dm-worker alive
0 dm-syncer alive
process dm-master.test already exit
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:39 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:59:40 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Sun Apr 28 15:59:43 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-task-standalone.yaml --remove-meta"
dmctl test cmd: "config source mysql-replica-01"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "validation status test"
got=1 expected=1
got=0 expected=1
command "validation status test": pattern 'pendingRowsStatus": "insert/update/delete: 0/0/0' count: 0 != expected: 1, failed the 0-th time, will retry again
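The got=/expected= lines record how many times a pattern was found in the dmctl output; when the count falls short, the harness re-runs the command. A sketch of that polling idiom (the grep pattern and master address are illustrative):

    # Re-run "validation status" until the pending-rows line appears.
    until dmctl --master-addr 127.0.0.1:8261 validation status test \
            | grep -q 'pendingRowsStatus'; do
        echo "validation status not ready, will retry"
        sleep 1
    done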
got=1 expected=1
got=1 expected=1
got=1 expected=1
--> full mode, check we validate different data types(gtid=false, relay=true)
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 15:59:52 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 15:59:53 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Sun Apr 28 15:59:55 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-task-standalone.yaml --remove-meta"
dmctl test cmd: "config source mysql-replica-01"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "validation status test"
got=1 expected=1
got=0 expected=1
command "validation status test": pattern 'pendingRowsStatus": "insert/update/delete: 0/0/0' count: 0 != expected: 1, failed the 0-th time, will retry again
got=1 expected=1
got=1 expected=1
got=1 expected=1
--> full mode, check we validate different data types(gtid=true, relay=false)
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 16:00:05 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 16:00:06 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Sun Apr 28 16:00:07 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-task-standalone.yaml --remove-meta"
dmctl test cmd: "config source mysql-replica-01"
got=1 expected=1
got=1 expected=1
dmctl test cmd: "validation status test"
got=1 expected=1
got=0 expected=1
command "validation status test": pattern 'pendingRowsStatus": "insert/update/delete: 0/0/0' count: 0 != expected: 1, failed the 0-th time, will retry again
got=1 expected=1
got=1 expected=1
got=1 expected=1
--> full mode, check we validate different data types(gtid=true, relay=true)
1 dm-master alive
2 dm-worker alive
0 dm-syncer alive
wait process dm-master.test exit...
wait process dm-master.test exit...
process dm-master.test already exit
wait process dm-worker.test exit...
wait process dm-worker.test exit...
wait process dm-worker.test exit...
process dm-worker.test already exit
process dm-syncer.test already exit
[Sun Apr 28 16:00:17 CST 2024] <<<<<< START DM-MASTER on port 8261, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-master.toml >>>>>>
wait for rpc addr 127.0.0.1:8261 alive the 1-th time
wait for rpc addr 127.0.0.1:8261 alive the 2-th time
rpc addr 127.0.0.1:8261 is alive
[Sun Apr 28 16:00:19 CST 2024] <<<<<< START DM-WORKER on port 8262, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker1.toml >>>>>>
wait for rpc addr 127.0.0.1:8262 alive the 1-th time
rpc addr 127.0.0.1:8262 is alive
dmctl test cmd: "operate-source create /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/source1.yaml"
[Sun Apr 28 16:00:21 CST 2024] <<<<<< START DM-WORKER on port 8263, config: /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-worker2.toml >>>>>>
wait for rpc addr 127.0.0.1:8263 alive the 1-th time
Cancelling nested steps due to timeout
Sending interrupt signal to process
Killing processes
rpc addr 127.0.0.1:8263 is alive
dmctl test cmd: "start-task /home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/conf/dm-task-standalone.yaml --remove-meta"
kill finished with exit code 0
Sending interrupt signal to process
Killing processes
restore config
make: *** [dm_integration_test_in_group] Terminated
/home/jenkins/agent/workspace/pingcap/tiflow/release-7.5/pull_dm_integration_test/tiflow/dm/tests/validator_basic/run.sh: line 53: 79661 Terminated              run_dm_ctl $WORK_DIR "127.0.0.1:$MASTER_PORT" "start-task $task_conf $remove_meta" "\"result\": true" 2 "\"source\": \"$SOURCE_ID1\"" 1
restore time_zone
ERROR 2003 (HY000): Can't connect to MySQL server on '127.0.0.1' (111)
script returned exit code 143
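(Note: exit code 143 is 128 + 15, i.e. run.sh was terminated by SIGTERM when the pipeline timeout above interrupted the build.)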
kill finished with exit code 0
[Pipeline] }
Cache not saved (inner-step execution failed)
(Jenkins scope-closing [Pipeline] lines for cache, dir, withCredentials, timeout, stage, container, withEnv, node, and podTemplate elided)
Failed in branch Matrix - TEST_GROUP = 'G11'
(remaining Jenkins scope-closing [Pipeline] lines elided)
[Pipeline] End of Pipeline
Timeout has been exceeded
org.jenkinsci.plugins.workflow.actions.ErrorAction$ErrorId: 30abf135-8a2b-424b-aaff-c849db090584
Finished: ABORTED